# Load the pre-processed subset of the Portuguese provenance trial:
# 5 provenances x 4 blocks (path is relative to this notebook's folder).
data <- readRDS(file="../../data/sub_portugal_5provs_4blocks.rds")
Height data from a provenance trial (in Portugal) of maritime pine saplings.
Randomized block design. Here I selected 5 provenances and 4 blocks.
Saplings have different ages: 11, 15, 20 and 27 month old.
# Cross-tabulate sample sizes: provenances (rows) by blocks (columns).
table(data[["prov"]], data[["block"]])
##
## 34 35 36 38
## LEI 78 71 72 82
## MIM 44 45 60 60
## PIE 26 29 34 34
## SAC 21 20 18 21
## VAL 37 43 42 42
# Cross-tabulate sample sizes: provenances (rows) by age class in months (columns).
table(data[["prov"]], as.factor(data[["age"]]))
##
## 11 15 20 27
## LEI 92 84 65 62
## MIM 72 59 40 38
## PIE 36 33 27 27
## SAC 23 23 17 17
## VAL 48 44 37 35
# Convert the design variables to factors (block, provenance, clone, tree ID).
# Vectorized column replacement instead of a scalar for-loop.
randoms <- c("block", "prov", "clon", "tree")
data[randoms] <- lapply(data[randoms], as.factor)
# Standardize tree age (z-score); used as the age covariate in all models below.
data <- data %>% mutate(age.sc = (age - mean(age)) / sd(age))
# Overall distribution of sapling heights (all ages pooled).
ggplot(data, aes(x = height)) +
  geom_histogram(color = "darkblue", fill = "lightblue") +
  theme_bw()
# Height distributions split by age class (one facet per age).
ggplot(data, aes(x = height, color = as.factor(age))) +
  geom_histogram(fill = "white", alpha = 0.5, position = "identity") +
  theme_bw() +
  facet_wrap(~as.factor(age)) +
  theme(legend.position = "none")
# Height vs age: linear trend (left panel) vs quadratic trend (right panel).
plot_grid(
  ggplot(data, aes(x = age, y = height)) +
    geom_point(alpha = 0.2) +
    stat_smooth(method = "lm", col = "red") +
    theme_bw() +
    theme(axis.title = element_text(size = 16)),
  ggplot(data, aes(x = age, y = height)) +
    geom_point(alpha = 0.2) +
    stat_smooth(method = "lm", col = "red", formula = y ~ poly(x, 2)) +
    theme_bw() +
    theme(axis.title = element_text(size = 16))
)
# Same comparison on the log scale: log(height) vs age, linear vs quadratic fit.
plot_grid(
  ggplot(data, aes(x = age, y = log(height))) +
    geom_point(alpha = 0.2) +
    stat_smooth(method = "lm", col = "red") +
    theme_bw() +
    theme(axis.title = element_text(size = 16)),
  ggplot(data, aes(x = age, y = log(height))) +
    geom_point(alpha = 0.2) +
    stat_smooth(method = "lm", col = "red", formula = y ~ poly(x, 2)) +
    theme_bw() +
    theme(axis.title = element_text(size = 16))
)
# Height distributions split by provenance (one facet per provenance).
ggplot(data, aes(x = height, color = prov)) +
  geom_histogram(fill = "white", alpha = 0.5, position = "identity") +
  theme_bw() +
  facet_wrap(~prov) +
  theme(legend.position = "none")
# Height distributions split by block (one facet per block).
ggplot(data, aes(x = height, color = block)) +
  geom_histogram(fill = "white", alpha = 0.5, position = "identity") +
  theme_bw() +
  facet_wrap(~block) +
  theme(legend.position = "none")
Using a dummy variable for each level yields regularized intercepts, because we use weakly informative priors. However, no information is shared between the intercepts.
P299 in Statistical Rethinking of McElreath.
Comment about the choice of HalfCauchy prior for \(\sigma\)
\(\sigma\) is strictly positive, what priors can we use?
Very weakly informative prior: \(\sigma \sim \text{HalfCauchy}(0,25)\). From Gelman (2006): 8-schools example (p430). And here.
Weakly informative prior:
\(\sigma \sim \text{HalfCauchy}(0,1)\) (McElreath, First version) \(\sigma \sim \text{HalfCauchy}(0,5)\) (Betancourt in 8-schools example)
\(\sigma \sim \text{exponential}(1)\) (McElreath, Second version) or \(\sigma \sim \text{exponential}(0.1)\)
\(\sigma \sim \text{LogNormal}(0,1)\) (McElreath, Second version)
More informative prior : \(\sigma \sim \text{HalfNormal}(0,1)\) or \(\sigma \sim \text{Half-t}(3,0,1)\)
# Assemble the data for Stan. NOTE: the Stan programs expect the block index
# under the name `bloc` (no trailing k).
data.list <- list(
  N      = nrow(data),                  # number of observations
  y      = data$height,                 # response: sapling height
  age    = data$age.sc,                 # standardized tree age
  nprov  = length(unique(data$prov)),   # number of provenances
  nblock = length(unique(data$block)),  # number of blocks
  prov   = as.numeric(data$prov),       # provenance index (1..nprov)
  bloc   = as.numeric(data$block)       # block index (1..nblock)
)
Model with sigma ~ HalfCauchy(0,1)
# Model with sigma ~ HalfCauchy(0, 1): compile the Stan program, draw 2 chains
# (1000 post-warmup iterations each), and summarize the key parameters.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_halfcauchy0_1 <- stan_model("mod1_halfcauchy0_1.stan")
fit.mod1_halfcauchy0_1 <- sampling(mod1_halfcauchy0_1, data = data.list,
                                   iter = 2000, chains = 2, cores = 2)
print(fit.mod1_halfcauchy0_1,
      pars = c("beta_age", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_halfcauchy0_1.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 75.41 0.13 7.81 65.32 75.45 85.38 3597 1
## alpha_prov[1] 82.63 0.16 9.12 71.16 82.58 94.09 3395 1
## alpha_prov[2] 57.36 0.16 9.53 45.34 57.50 69.51 3637 1
## alpha_prov[3] 34.55 0.16 9.30 22.85 34.47 46.52 3579 1
## alpha_prov[4] 26.45 0.15 9.89 13.30 26.51 39.21 4190 1
## alpha_prov[5] 47.94 0.18 9.48 35.74 47.71 60.15 2835 1
## alpha_block[1] 53.88 0.16 9.36 41.99 53.71 65.96 3594 1
## alpha_block[2] 56.60 0.17 9.18 44.76 56.76 68.85 3074 1
## alpha_block[3] 64.83 0.19 9.62 52.74 64.86 77.08 2631 1
## alpha_block[4] 73.33 0.18 9.87 60.35 73.57 85.66 2930 1
## sigma_y 301.04 0.26 10.26 288.03 300.56 314.24 1585 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 16:53:28 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
Model with sigma ~ HalfCauchy(0,25)
# Model with sigma ~ HalfCauchy(0, 25): same fit, wider prior on sigma.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_halfcauchy0_25 <- stan_model("mod1_halfcauchy0_25.stan")
fit.mod1_halfcauchy0_25 <- sampling(mod1_halfcauchy0_25, data = data.list,
                                    iter = 2000, chains = 2, cores = 2)
print(fit.mod1_halfcauchy0_25,
      pars = c("beta_age", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_halfcauchy0_25.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 75.38 0.15 8.00 65.31 75.26 85.78 2680 1
## alpha_prov[1] 82.43 0.17 9.49 70.27 82.48 94.94 3137 1
## alpha_prov[2] 57.48 0.17 9.41 45.46 57.64 69.35 3038 1
## alpha_prov[3] 34.51 0.16 9.51 22.02 34.74 46.46 3534 1
## alpha_prov[4] 26.27 0.17 9.69 13.63 26.41 38.43 3410 1
## alpha_prov[5] 47.96 0.17 9.80 35.34 47.84 60.23 3438 1
## alpha_block[1] 53.91 0.16 9.45 42.06 53.82 66.07 3326 1
## alpha_block[2] 57.01 0.17 9.30 45.12 56.86 69.30 3110 1
## alpha_block[3] 64.67 0.17 9.66 52.62 64.59 77.22 3055 1
## alpha_block[4] 73.36 0.19 9.79 61.16 73.51 85.77 2657 1
## sigma_y 300.86 0.28 10.70 287.21 300.66 314.25 1423 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 16:54:12 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
Posterior predictive checks
# Posterior predictive check: observed heights vs 50 replicated datasets.
y_rep <- as.matrix(fit.mod1_halfcauchy0_1, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
Let’s vectorize this model.
# Vectorized version of model 1 (same likelihood and priors).
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_vectorized <- stan_model("mod1_vectorized.stan")
fit.mod1_vectorized <- sampling(mod1_vectorized, data = data.list,
                                iter = 2000, chains = 2, cores = 2)
print(fit.mod1_vectorized,
      pars = c("beta_age", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_vectorized.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 75.28 0.16 7.40 65.84 75.19 84.81 2234 1
## alpha_prov[1] 82.50 0.19 9.65 69.89 82.44 94.85 2588 1
## alpha_prov[2] 57.51 0.20 9.47 45.21 57.66 69.37 2226 1
## alpha_prov[3] 34.22 0.18 9.68 21.87 34.08 46.78 2814 1
## alpha_prov[4] 26.27 0.17 9.79 13.36 26.44 38.71 3221 1
## alpha_prov[5] 47.81 0.18 9.92 34.79 48.00 60.22 3145 1
## alpha_block[1] 53.38 0.17 9.68 41.23 53.51 65.91 3082 1
## alpha_block[2] 56.49 0.19 9.35 44.29 56.50 68.59 2376 1
## alpha_block[3] 64.46 0.17 9.70 51.97 64.59 76.84 3259 1
## alpha_block[4] 72.87 0.21 9.46 60.61 72.99 84.94 2128 1
## sigma_y 301.49 0.28 10.67 287.97 301.20 315.36 1439 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 16:54:57 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
Another way of vectorizing the model.
# A second vectorization of model 1 (same likelihood and priors).
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_vectorized2 <- stan_model("mod1_vectorized2.stan")
fit.mod1_vectorized2 <- sampling(mod1_vectorized2, data = data.list,
                                 iter = 2000, chains = 2, cores = 2)
print(fit.mod1_vectorized2,
      pars = c("beta_age", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_vectorized2.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 75.52 0.15 7.80 65.73 75.59 85.36 2817 1
## alpha_prov[1] 82.55 0.18 9.44 69.88 82.63 94.87 2853 1
## alpha_prov[2] 57.43 0.18 9.60 45.16 57.54 69.76 2955 1
## alpha_prov[3] 34.68 0.16 9.34 22.54 34.59 46.55 3463 1
## alpha_prov[4] 26.20 0.14 9.46 13.98 26.35 38.18 4848 1
## alpha_prov[5] 48.20 0.16 9.20 36.33 48.32 60.48 3462 1
## alpha_block[1] 54.02 0.15 9.27 42.55 53.99 65.56 3753 1
## alpha_block[2] 56.90 0.16 9.35 44.88 56.89 68.84 3458 1
## alpha_block[3] 64.67 0.15 9.23 52.72 64.69 76.54 3704 1
## alpha_block[4] 73.15 0.18 9.12 61.89 72.99 84.83 2561 1
## sigma_y 300.90 0.24 10.08 288.50 300.63 314.34 1828 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 16:55:40 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
Let’s compare the speed of these four models.
# Compare per-chain elapsed time (seconds; warmup vs sampling columns) across
# the four implementations of model 1.
get_elapsed_time(fit.mod1_halfcauchy0_25)
## warmup sample
## chain:1 2.57108 0.347243
## chain:2 2.74658 0.358630
get_elapsed_time(fit.mod1_halfcauchy0_1)
## warmup sample
## chain:1 2.43302 0.326342
## chain:2 2.33127 0.331056
get_elapsed_time(fit.mod1_vectorized)
## warmup sample
## chain:1 3.03049 0.367305
## chain:2 3.01369 0.403298
get_elapsed_time(fit.mod1_vectorized2)
## warmup sample
## chain:1 3.01181 0.282574
## chain:2 2.79074 0.287144
# LogNormal-likelihood version of model 1. Note the treedepth warnings below:
# sampling hit the max_treedepth limit, so the fit deserves scrutiny.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_lognormal <- stan_model("mod1_lognormal.stan")
fit.mod1_lognormal <- sampling(mod1_lognormal, data = data.list,
                               iter = 2000, chains = 2, cores = 2)
## Warning: There were 269 transitions after warmup that exceeded the maximum treedepth. Increase max_treedepth above 10. See
## http://mc-stan.org/misc/warnings.html#maximum-treedepth-exceeded
## Warning: Examine the pairs() plot to diagnose sampling problems
print(fit.mod1_lognormal,
      pars = c("beta_age", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_lognormal.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 809 1.00
## alpha_prov[1] 2.81 0.21 3.50 -1.80 2.85 7.12 266 1.01
## alpha_prov[2] 2.75 0.21 3.50 -1.85 2.79 7.05 266 1.01
## alpha_prov[3] 2.65 0.21 3.50 -1.94 2.68 6.95 267 1.01
## alpha_prov[4] 2.76 0.21 3.50 -1.83 2.81 7.07 267 1.01
## alpha_prov[5] 2.77 0.21 3.50 -1.82 2.80 7.05 266 1.01
## alpha_block[1] 3.04 0.21 3.50 -1.27 3.00 7.64 266 1.01
## alpha_block[2] 3.07 0.21 3.49 -1.21 3.04 7.67 266 1.01
## alpha_block[3] 3.09 0.21 3.50 -1.22 3.05 7.68 267 1.01
## alpha_block[4] 3.19 0.21 3.50 -1.09 3.14 7.81 266 1.01
## sigma_y 0.40 0.00 0.01 0.39 0.40 0.41 866 1.01
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 16:57:09 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for the LogNormal model (on the height scale).
y_rep <- as.matrix(fit.mod1_lognormal, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# Second LogNormal variant of model 1.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_2_lognormal <- stan_model("mod1_2_lognormal.stan")
fit.mod1_2_lognormal <- sampling(mod1_2_lognormal, data = data.list,
                                 iter = 2000, chains = 2, cores = 2)
print(fit.mod1_2_lognormal,
      pars = c("beta_age", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_2_lognormal.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 1158 1
## alpha_prov[1] 2.64 0.02 0.32 2.22 2.65 3.05 345 1
## alpha_prov[2] 2.59 0.02 0.32 2.18 2.59 2.99 347 1
## alpha_prov[3] 2.48 0.02 0.32 2.07 2.49 2.89 354 1
## alpha_prov[4] 2.60 0.02 0.32 2.19 2.60 2.99 363 1
## alpha_prov[5] 2.60 0.02 0.32 2.18 2.60 3.00 347 1
## alpha_block[1] 3.20 0.02 0.32 2.80 3.20 3.61 351 1
## alpha_block[2] 3.23 0.02 0.32 2.83 3.23 3.64 354 1
## alpha_block[3] 3.25 0.02 0.32 2.85 3.25 3.67 355 1
## alpha_block[4] 3.35 0.02 0.32 2.94 3.35 3.76 347 1
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1488 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 16:58:01 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for the second LogNormal variant.
y_rep <- as.matrix(fit.mod1_2_lognormal, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# Third LogNormal variant of model 1.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_3_lognormal <- stan_model("mod1_3_lognormal.stan")
fit.mod1_3_lognormal <- sampling(mod1_3_lognormal, data = data.list,
                                 iter = 2000, chains = 2, cores = 2)
print(fit.mod1_3_lognormal,
      pars = c("beta_age", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_3_lognormal.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 865 1
## alpha_prov[1] 2.68 0.02 0.33 2.27 2.69 3.07 362 1
## alpha_prov[2] 2.63 0.02 0.33 2.22 2.64 3.01 368 1
## alpha_prov[3] 2.52 0.02 0.33 2.12 2.54 2.92 362 1
## alpha_prov[4] 2.64 0.02 0.33 2.22 2.65 3.02 363 1
## alpha_prov[5] 2.64 0.02 0.33 2.24 2.65 3.03 366 1
## alpha_block[1] 3.16 0.02 0.33 2.78 3.16 3.57 366 1
## alpha_block[2] 3.19 0.02 0.33 2.81 3.19 3.60 368 1
## alpha_block[3] 3.21 0.02 0.33 2.82 3.20 3.62 364 1
## alpha_block[4] 3.31 0.02 0.33 2.92 3.30 3.72 365 1
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1276 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 16:58:53 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for the third LogNormal variant.
y_rep <- as.matrix(fit.mod1_3_lognormal, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# Same data list but with log(height) as the response, for the Gaussian-on-log
# models. NOTE: the Stan programs expect the block index under the name `bloc`.
data.list_logy <- list(
  N      = nrow(data),                  # number of observations
  y      = log(data$height),            # response: log sapling height
  age    = data$age.sc,                 # standardized tree age
  nprov  = length(unique(data$prov)),   # number of provenances
  nblock = length(unique(data$block)),  # number of blocks
  prov   = as.numeric(data$prov),       # provenance index (1..nprov)
  bloc   = as.numeric(data$block)       # block index (1..nblock)
)
# Gaussian model on log(height), with max_treedepth raised to 14.
# The bulk-ESS warning below indicates the chains mixed poorly for this fit.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_logy <- stan_model("mod1_logy.stan")
fit.mod1log <- sampling(mod1_logy, data = data.list_logy, iter = 2000,
                        chains = 2, cores = 2, control = list(max_treedepth = 14))
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#bulk-ess
print(fit.mod1log,
      pars = c("beta_age", "beta_age2", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_logy.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 750 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 594 1.01
## alpha_prov[1] 2.54 0.36 3.49 -2.09 2.60 7.05 94 1.03
## alpha_prov[2] 2.49 0.36 3.49 -2.12 2.56 6.99 94 1.03
## alpha_prov[3] 2.38 0.36 3.49 -2.24 2.43 6.88 95 1.03
## alpha_prov[4] 2.50 0.36 3.49 -2.11 2.56 7.01 94 1.03
## alpha_prov[5] 2.50 0.36 3.49 -2.12 2.55 7.00 94 1.03
## alpha_block[1] 3.31 0.36 3.49 -1.18 3.25 7.94 95 1.03
## alpha_block[2] 3.34 0.36 3.49 -1.15 3.30 7.96 95 1.03
## alpha_block[3] 3.35 0.36 3.49 -1.15 3.31 7.98 95 1.03
## alpha_block[4] 3.46 0.36 3.49 -1.06 3.39 8.08 95 1.03
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 685 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:00:20 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check on the log scale (response is log(height) here).
y_rep <- as.matrix(fit.mod1log, pars = "y_rep")
ppc_dens_overlay(y = log(data$height), yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# Second variant of the log(height) model; max_treedepth raised to 14.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_logy2 <- stan_model("mod1_logy2.stan")
fit.mod1log2 <- sampling(mod1_logy2, data = data.list_logy, iter = 2000,
                         chains = 2, cores = 2, control = list(max_treedepth = 14))
print(fit.mod1log2,
      pars = c("beta_age", "beta_age2", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_logy2.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 1022 1
## beta_age2 -0.08 0.00 0.02 -0.11 -0.08 -0.06 909 1
## alpha_prov[1] 2.68 0.02 0.33 2.25 2.68 3.11 455 1
## alpha_prov[2] 2.63 0.02 0.33 2.20 2.63 3.05 458 1
## alpha_prov[3] 2.53 0.02 0.33 2.09 2.52 2.94 457 1
## alpha_prov[4] 2.64 0.02 0.33 2.20 2.63 3.06 459 1
## alpha_prov[5] 2.64 0.02 0.33 2.21 2.64 3.06 458 1
## alpha_block[1] 3.16 0.02 0.33 2.74 3.16 3.59 455 1
## alpha_block[2] 3.19 0.02 0.33 2.78 3.19 3.62 457 1
## alpha_block[3] 3.21 0.02 0.33 2.79 3.21 3.64 457 1
## alpha_block[4] 3.31 0.02 0.33 2.89 3.31 3.74 454 1
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1160 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:01:10 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check on the log scale for the second log(height) model.
y_rep <- as.matrix(fit.mod1log2, pars = "y_rep")
ppc_dens_overlay(y = log(data$height), yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# Third variant of the log(height) model; max_treedepth raised to 14.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod1_logy3 <- stan_model("mod1_logy3.stan")
fit.mod1log3 <- sampling(mod1_logy3, data = data.list_logy, iter = 2000,
                         chains = 2, cores = 2, control = list(max_treedepth = 14))
print(fit.mod1log3,
      pars = c("beta_age", "beta_age2", "alpha_prov", "alpha_block", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod1_logy3.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 818 1.00
## beta_age2 -0.08 0.00 0.02 -0.11 -0.08 -0.06 977 1.00
## alpha_prov[1] 2.63 0.02 0.32 2.23 2.62 3.07 237 1.02
## alpha_prov[2] 2.58 0.02 0.32 2.19 2.56 3.01 237 1.02
## alpha_prov[3] 2.47 0.02 0.32 2.08 2.46 2.90 239 1.02
## alpha_prov[4] 2.58 0.02 0.33 2.18 2.57 3.03 239 1.02
## alpha_prov[5] 2.59 0.02 0.32 2.19 2.57 3.02 241 1.02
## alpha_block[1] 3.21 0.02 0.32 2.77 3.23 3.61 241 1.02
## alpha_block[2] 3.24 0.02 0.32 2.81 3.26 3.64 236 1.02
## alpha_block[3] 3.26 0.02 0.32 2.82 3.27 3.65 240 1.02
## alpha_block[4] 3.36 0.02 0.32 2.92 3.37 3.76 241 1.02
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1229 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:02:00 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
Adaptive regularization
Links:
P357 McElreath (first version).
\[\begin{equation} \begin{aligned} h_{i} & \sim \text{LogNormal}(\mu_{i},\sigma_{i})\\ \mu_{i} & = \beta_{age}age_{i} + \beta_{age2}age^{2}_{i} + \alpha_{PROV[p]}\\ \alpha_{PROV} & \sim \mathcal{N}(\mu_{\alpha_{PROV}},\sigma_{\alpha_{PROV}})\\ \beta_{age} & \sim \mathcal{N}(0,1) \\ \beta_{age2} & \sim \mathcal{N}(0,1)\\ \mu_{\alpha_{PROV}} & \sim \mathcal{N}(0,1)\\ \sigma_{\alpha_{PROV}} & \sim \text{HalfCauchy}(0,1)\\ \sigma & \sim \text{HalfCauchy}(0,1) \end{aligned} \end{equation}\]data.list_mod2_1 <- list(N=length(data$height), # Number of observations
y=data$height, # Response variable: sapling height
age=data$age.sc, # Standardized tree age
nprov=length(unique(data$prov)), # Number of provenances
prov=as.numeric(data$prov)) # Provenance index (1..nprov); no block term in this model
# Adaptive regularization (partial pooling) of provenance intercepts,
# centered parameterization; max_treedepth raised to 14.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod2_1 <- stan_model("mod2_1.stan")
fit.mod2_1 <- sampling(mod2_1, data = data.list_mod2_1, iter = 2000,
                       chains = 2, cores = 2, control = list(max_treedepth = 14))
## Warning: There were 14 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: The largest R-hat is 1.13, indicating chains have not mixed.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#r-hat
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#bulk-ess
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summaries, including the hyperparameters of the provenance intercepts.
print(fit.mod2_1,
      pars = c("beta_age", "beta_age2", "alpha_prov", "sigma_y",
               "mean_alpha_prov", "sigma_alpha_prov"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_1.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.49 1087 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 696 1.00
## alpha_prov[1] 5.90 0.00 0.03 5.86 5.90 5.94 643 1.00
## alpha_prov[2] 5.86 0.00 0.03 5.82 5.86 5.90 923 1.00
## alpha_prov[3] 5.78 0.00 0.04 5.72 5.78 5.83 206 1.02
## alpha_prov[4] 5.86 0.00 0.04 5.80 5.86 5.91 1403 1.00
## alpha_prov[5] 5.86 0.00 0.03 5.82 5.86 5.90 1138 1.00
## sigma_y 0.40 0.00 0.01 0.39 0.40 0.41 1725 1.00
## mean_alpha_prov 5.24 0.65 1.62 2.04 5.84 5.89 6 1.34
## sigma_alpha_prov 0.71 0.68 1.82 0.03 0.07 3.34 7 1.28
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:02:48 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for the partially pooled (centered) model.
y_rep <- as.matrix(fit.mod2_1, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# NUTS diagnostics for mod2_1: trace of the worst-mixing intercept and a
# pairs plot with divergent transitions highlighted.
posterior <- as.array(fit.mod2_1)
np <- nuts_params(fit.mod2_1)
mcmc_trace(posterior, pars = "alpha_prov[3]", np = np) +
  xlab("Post-warmup iteration")
mcmc_pairs(posterior, np = np,
           pars = c("mean_alpha_prov", "sigma_alpha_prov", "alpha_prov[3]",
                    "beta_age", "beta_age2"),
           off_diag_args = list(size = 1, alpha = 1/3),
           np_style = pairs_style_np(div_size = 1, div_shape = 19))
Why is this model poorly fitted?
Are four points (four ages) not enough to estimate a second-degree polynomial? I tried removing \(\beta_{age2}\), but it did not change much: it reduced the number of divergent transitions (to 11), but R-hat remained very high — in particular for \(\mu_{\alpha_{PROV}}\) (R-hat = 1.53!).
Too vague priors?
# Same partially pooled model with less vague priors on the hyperparameters.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod2_1_otherpriors <- stan_model("mod2_1_otherpriors.stan")
fit.mod2_1_otherpriors <- sampling(mod2_1_otherpriors, data = data.list_mod2_1,
                                   iter = 2000, chains = 2, cores = 2,
                                   control = list(max_treedepth = 14))
## Warning: There were 37 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summaries for the tighter-prior fit.
print(fit.mod2_1_otherpriors,
      pars = c("beta_age", "beta_age2", "alpha_prov", "sigma_y",
               "mean_alpha_prov", "sigma_alpha_prov"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_1_otherpriors.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.47 0 0.02 0.44 0.47 0.49 720 1.00
## beta_age2 -0.09 0 0.02 -0.11 -0.09 -0.07 767 1.00
## alpha_prov[1] 5.90 0 0.03 5.86 5.90 5.93 827 1.00
## alpha_prov[2] 5.86 0 0.03 5.82 5.86 5.90 1008 1.00
## alpha_prov[3] 5.79 0 0.04 5.73 5.79 5.84 353 1.01
## alpha_prov[4] 5.86 0 0.04 5.81 5.86 5.91 1112 1.00
## alpha_prov[5] 5.86 0 0.03 5.82 5.86 5.91 1078 1.00
## sigma_y 0.40 0 0.01 0.39 0.40 0.41 1402 1.00
## mean_alpha_prov 5.85 0 0.05 5.80 5.85 5.90 550 1.00
## sigma_alpha_prov 0.07 0 0.06 0.03 0.06 0.13 406 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:03:42 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for the tighter-prior fit.
y_rep <- as.matrix(fit.mod2_1_otherpriors, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# Trace plots (with divergences) for the tighter-prior centered fit.
posterior_cp <- as.array(fit.mod2_1_otherpriors)
np_cp <- nuts_params(fit.mod2_1_otherpriors)
mcmc_trace(posterior_cp, pars = c("alpha_prov[3]", "mean_alpha_prov"), np = np_cp) +
  xlab("Post-warmup iteration")
This model is better. But still not ok, any suggestions? Let’s try the non-centered parametrization.
Links:
From McElreath, P429 (13.4.2.) of Statistical Rethinking (second version)
\[ \alpha \sim \mathcal{N}(\mu,\sigma)\]
is equivalent to
\[\begin{equation} \begin{aligned} \alpha &= \mu + \beta\\ \beta &\sim \mathcal{N}(0,\sigma) \end{aligned} \end{equation}\]
which is in turn equivalent to
\[\begin{equation} \begin{aligned} \alpha &= \mu + z\sigma\\ z &\sim \mathcal{N}(0,1) \end{aligned} \end{equation}\]
No parameters are left inside the prior.
From Updating: A Set of Bayesian Notes. Jeffrey B. Arnold. 20 Multilevel Models
These are two ways of writing the same model. However, they change the parameters that the HMC algorithm is actively sampling and thus can have different sampling performance.
However, neither is universally better.
And there is currently no ex-ante way to know which will work better, nor at what amount of "data" the performance of one or the other becomes better. However, one other reason to use the centered parameterization (if it is also scaled) is that the Stan HMC implementation tends to be more efficient when all parameters are on roughly unit scale.
Non-centered model equation:
\[\begin{equation} \begin{aligned} h_{i} & \sim \text{LogNormal}(\mu_{i},\sigma)\\ \mu_{i} & = \alpha + \beta_{age}age_{i} + \beta_{age2}age^{2}_{i} + z_{PROV[p]}\sigma_{PROV}\\ \alpha & \sim \mathcal{N}(0,1) \\ \beta_{age} & \sim \mathcal{N}(0,1) \\ \beta_{age2} & \sim \mathcal{N}(0,1)\\ z_{PROV[p]} & \sim \mathcal{N}(0,1)\\ \sigma_{PROV} & \sim \text{HalfCauchy}(0,1)\\ \sigma & \sim \text{HalfCauchy}(0,1) \end{aligned} \end{equation}\]
# Non-centered parameterization of the partially pooled model.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod2_1_nc <- stan_model("mod2_1_nc.stan")
fit.mod2_1_nc <- sampling(mod2_1_nc, data = data.list_mod2_1, iter = 2000,
                          chains = 2, cores = 2, control = list(max_treedepth = 14))
## Warning: There were 8 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summaries for the non-centered fit (z_prov are the standardized offsets).
print(fit.mod2_1_nc,
      pars = c("beta_age", "beta_age2", "z_prov", "sigma_prov", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_1_nc.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.49 1245 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 935 1.00
## z_prov[1] 0.85 0.02 0.64 0.04 0.83 1.66 828 1.00
## z_prov[2] 0.17 0.02 0.63 -0.63 0.18 0.94 1209 1.00
## z_prov[3] -0.96 0.02 0.73 -1.94 -0.92 -0.05 969 1.01
## z_prov[4] 0.20 0.02 0.69 -0.66 0.19 1.08 1077 1.00
## z_prov[5] 0.23 0.02 0.64 -0.58 0.23 1.03 1129 1.00
## sigma_prov 0.08 0.00 0.05 0.03 0.06 0.14 175 1.01
## sigma_y 0.40 0.00 0.01 0.39 0.40 0.41 1660 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:04:33 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for the non-centered model.
y_rep <- as.matrix(fit.mod2_1_nc, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
I think I’m doing something wrong here… Let’s try with more informative priors.
# Non-centered model with more informative priors.
# Fixed: `=` replaced by `<-` for top-level assignment (R idiom).
mod2_1_nc_otherpriors <- stan_model("mod2_1_nc_otherpriors.stan")
fit.mod2_1_nc_otherpriors <- sampling(mod2_1_nc_otherpriors, data = data.list_mod2_1,
                                      iter = 2000, chains = 2, cores = 2,
                                      control = list(max_treedepth = 14))
## Warning: There were 2 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
# Posterior summaries for the non-centered fit with more informative priors.
print(fit.mod2_1_nc_otherpriors,
      pars = c("beta_age", "beta_age2", "z_prov", "sigma_prov", "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_1_nc_otherpriors.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.47 0.00 0.02 0.45 0.47 0.49 1053 1
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 1082 1
## z_prov[1] 0.85 0.02 0.62 0.10 0.81 1.65 1136 1
## z_prov[2] 0.15 0.02 0.61 -0.64 0.15 0.89 1036 1
## z_prov[3] -1.01 0.03 0.71 -1.94 -0.97 -0.14 780 1
## z_prov[4] 0.14 0.02 0.67 -0.72 0.17 0.98 1116 1
## z_prov[5] 0.22 0.02 0.62 -0.52 0.21 0.98 1291 1
## sigma_prov 0.07 0.00 0.05 0.03 0.06 0.13 392 1
## sigma_y 0.40 0.00 0.01 0.39 0.40 0.41 1469 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:05:31 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for the non-centered model with informative priors.
y_rep <- as.matrix(fit.mod2_1_nc_otherpriors, pars = "y_rep")
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
# NUTS diagnostics for the non-centered informative-prior model: traces plus a
# pairs plot with divergent transitions highlighted.
posterior_cp <- as.array(fit.mod2_1_nc_otherpriors)
np_cp <- nuts_params(fit.mod2_1_nc_otherpriors)
mcmc_trace(posterior_cp, pars = c("alpha", "sigma_prov"), np = np_cp) +
  xlab("Post-warmup iteration")
mcmc_pairs(posterior_cp, np = np_cp,
           pars = c("sigma_y", "sigma_prov", "alpha", "beta_age", "beta_age2"),
           off_diag_args = list(size = 1, alpha = 1/3),
           np_style = pairs_style_np(div_size = 3, div_shape = 19))
A lot better: only 2 divergent transitions remain and the effective sample sizes are much higher.
# Data list for the provenance + block varying-intercept model (mod2_2).
data.list_mod2_2 <- with(data, list(
  N      = length(height),        # Number of observations
  y      = height,                # Response variable: tree height
  age    = age.sc,                # Standardized tree age
  nprov  = length(unique(prov)),  # Number of provenances
  nblock = length(unique(block)), # Number of blocks
  prov   = as.numeric(prov),      # Provenance index of each tree
  bloc   = as.numeric(block)      # Block index of each tree
))
# Compile and sample mod2_2: varying intercepts for both provenance and
# block (centered parameterization, each with its own mean parameter).
mod2_2 = stan_model("mod2_2.stan")
fit.mod2_2 <- sampling(mod2_2, data = data.list_mod2_2, iter = 2000, chains = 2, cores = 2, control=list(max_treedepth=14))
## Warning: There were 43 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#bulk-ess
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summary for mod2_2: age effects, the two sets of varying
# intercepts, their means/SDs, and the residual SD.
print(fit.mod2_2, pars = c("beta_age","beta_age2",
"alpha_prov","alpha_block",
"mean_alpha_prov","sigma_alpha_prov",
"mean_alpha_block","sigma_alpha_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_2.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 847 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 838 1.00
## alpha_prov[1] 2.92 0.05 0.76 2.01 2.95 3.89 223 1.00
## alpha_prov[2] 2.88 0.05 0.76 1.95 2.89 3.84 222 1.00
## alpha_prov[3] 2.80 0.05 0.76 1.89 2.82 3.77 223 1.00
## alpha_prov[4] 2.88 0.05 0.76 1.97 2.91 3.84 224 1.00
## alpha_prov[5] 2.89 0.05 0.76 1.97 2.90 3.85 224 1.00
## alpha_block[1] 2.92 0.05 0.76 1.97 2.91 3.84 223 1.00
## alpha_block[2] 2.95 0.05 0.76 1.99 2.93 3.87 222 1.00
## alpha_block[3] 2.96 0.05 0.76 2.00 2.95 3.90 222 1.00
## alpha_block[4] 3.05 0.05 0.76 2.09 3.04 3.97 222 1.00
## mean_alpha_prov 2.87 0.05 0.76 1.95 2.88 3.83 225 1.00
## sigma_alpha_prov 0.08 0.00 0.06 0.03 0.07 0.14 460 1.00
## mean_alpha_block 2.85 0.06 0.76 1.91 2.90 3.77 144 1.00
## sigma_alpha_block 0.23 0.06 0.74 0.04 0.09 0.26 135 1.01
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1423 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:06:49 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for mod2_2 (50 replicated datasets).
y_rep <- as.matrix(fit.mod2_2, pars = "y_rep")
ppc_dens_overlay(y =data$height,y_rep[1:50, ]) + theme_bw() + theme(legend.text=element_text(size=25),
legend.title=element_text(size=18),
axis.text = element_text(size=18),
legend.position = c(0.8,0.6))
McElreath: “[…] note that there is only one global mean parameter \(\alpha\), and both of the varying intercept parameters are centered at zero. We can’t identify a separate mean for each varying intercept type, because both intercepts are added to the same linear prediction. So it is conventional to define varying intercepts with a mean of zero, so there’s no risk of accidentally creating hard-to-identify parameters.” “If you do include a mean for each cluster type, it won’t be the end of the world, however.”
# Same model as mod2_2 but with alternative priors.
mod2_2_otherpriors = stan_model("mod2_2_otherpriors.stan")
fit.mod2_2_otherpriors <- sampling(mod2_2_otherpriors, data = data.list_mod2_2, iter = 2000, chains = 2, cores = 2, control=list(max_treedepth=14))
## Warning: There were 72 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#bulk-ess
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summary for mod2_2_otherpriors (same parameters as mod2_2).
print(fit.mod2_2_otherpriors, pars = c("beta_age","beta_age2",
"alpha_prov","alpha_block",
"mean_alpha_prov","sigma_alpha_prov",
"mean_alpha_block","sigma_alpha_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_2_otherpriors.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 991 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 1088 1.00
## alpha_prov[1] 3.04 0.20 1.81 0.60 3.10 5.40 81 1.02
## alpha_prov[2] 2.99 0.20 1.81 0.58 3.05 5.35 81 1.02
## alpha_prov[3] 2.92 0.20 1.81 0.47 2.98 5.29 81 1.02
## alpha_prov[4] 3.00 0.20 1.81 0.56 3.05 5.36 81 1.02
## alpha_prov[5] 3.00 0.20 1.81 0.58 3.07 5.36 81 1.02
## alpha_block[1] 2.81 0.20 1.81 0.44 2.75 5.25 81 1.02
## alpha_block[2] 2.84 0.20 1.81 0.48 2.80 5.27 81 1.02
## alpha_block[3] 2.85 0.20 1.81 0.50 2.79 5.28 81 1.02
## alpha_block[4] 2.94 0.20 1.81 0.58 2.90 5.39 81 1.02
## mean_alpha_prov 2.99 0.20 1.81 0.54 3.05 5.36 81 1.02
## sigma_alpha_prov 0.08 0.00 0.06 0.03 0.06 0.14 586 1.00
## mean_alpha_block 2.86 0.20 1.81 0.50 2.78 5.30 80 1.02
## sigma_alpha_block 0.11 0.01 0.10 0.04 0.08 0.21 353 1.01
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 905 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:08:16 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Compile and sample mod2_3: a single global intercept alpha plus
# zero-centered varying intercepts for provenance and block.
mod2_3 = stan_model("mod2_3.stan")
fit.mod2_3 <- sampling(mod2_3, data = data.list_mod2_2, iter = 2000, chains = 2, cores = 2, control=list(max_treedepth=14))
## Warning: There were 2 transitions after warmup that exceeded the maximum treedepth. Increase max_treedepth above 14. See
## http://mc-stan.org/misc/warnings.html#maximum-treedepth-exceeded
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: The largest R-hat is 1.11, indicating chains have not mixed.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#r-hat
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#bulk-ess
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summary for mod2_3; note the poor mixing of alpha and the
# block intercepts (n_eff = 11, Rhat = 1.17 in the output below).
print(fit.mod2_3, pars = c("beta_age","beta_age2",
"alpha",
"alpha_prov","alpha_block",
"sigma_alpha_prov","sigma_alpha_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_3.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 874 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 835 1.00
## alpha 1.85 0.59 1.95 -0.27 1.45 5.76 11 1.17
## alpha_prov[1] 0.05 0.00 0.05 0.00 0.05 0.10 875 1.00
## alpha_prov[2] 0.00 0.00 0.05 -0.04 0.00 0.06 1018 1.00
## alpha_prov[3] -0.07 0.00 0.05 -0.13 -0.06 -0.01 748 1.00
## alpha_prov[4] 0.01 0.00 0.05 -0.04 0.01 0.07 950 1.00
## alpha_prov[5] 0.01 0.00 0.05 -0.04 0.01 0.06 950 1.00
## alpha_block[1] 3.94 0.59 1.95 0.03 4.36 6.09 11 1.17
## alpha_block[2] 3.97 0.59 1.95 0.05 4.38 6.09 11 1.17
## alpha_block[3] 3.98 0.59 1.95 0.06 4.39 6.11 11 1.17
## alpha_block[4] 4.08 0.59 1.95 0.16 4.51 6.21 11 1.17
## sigma_alpha_prov 0.08 0.00 0.06 0.03 0.06 0.13 560 1.00
## sigma_alpha_block 4.31 0.59 2.80 0.15 4.19 7.56 22 1.08
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1158 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:11:20 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for mod2_3 (50 replicated datasets).
y_rep <- as.matrix(fit.mod2_3, pars = "y_rep")
ppc_dens_overlay(y =data$height,y_rep[1:50, ]) + theme_bw() + theme(legend.text=element_text(size=25),
legend.title=element_text(size=18),
axis.text = element_text(size=18),
legend.position = c(0.8,0.6))
# mod2_3 with alternative (more informative) priors; this run produced
# no sampler warnings.
mod2_3_otherpriors = stan_model("mod2_3_otherpriors.stan")
fit.mod2_3_otherpriors <- sampling(mod2_3_otherpriors, data = data.list_mod2_2, iter = 2000, chains = 2, cores = 2, control=list(max_treedepth=14))
# Posterior summary for mod2_3_otherpriors.
print(fit.mod2_3_otherpriors, pars = c("beta_age","beta_age2",
"alpha",
"alpha_prov","alpha_block",
"sigma_alpha_prov","sigma_alpha_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_3_otherpriors.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0 0.02 0.44 0.46 0.48 1315 1.00
## beta_age2 -0.09 0 0.02 -0.11 -0.09 -0.07 1164 1.00
## alpha 5.84 0 0.07 5.76 5.85 5.92 360 1.00
## alpha_prov[1] 0.05 0 0.05 0.00 0.04 0.10 394 1.02
## alpha_prov[2] 0.01 0 0.05 -0.04 0.00 0.06 410 1.02
## alpha_prov[3] -0.07 0 0.05 -0.13 -0.06 -0.01 525 1.01
## alpha_prov[4] 0.01 0 0.05 -0.04 0.01 0.06 483 1.02
## alpha_prov[5] 0.01 0 0.05 -0.04 0.01 0.06 444 1.02
## alpha_block[1] -0.04 0 0.06 -0.11 -0.04 0.03 309 1.02
## alpha_block[2] -0.02 0 0.06 -0.08 -0.02 0.05 298 1.02
## alpha_block[3] 0.00 0 0.06 -0.07 0.00 0.07 292 1.02
## alpha_block[4] 0.09 0 0.06 0.02 0.08 0.16 299 1.02
## sigma_alpha_prov 0.07 0 0.06 0.03 0.06 0.13 370 1.01
## sigma_alpha_block 0.10 0 0.08 0.04 0.08 0.19 681 1.00
## sigma_y 0.40 0 0.01 0.39 0.40 0.41 1709 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:12:09 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
\(\alpha\) is very different between the two models (1.85 in mod2_3 vs. 5.84 in mod2_3_otherpriors) — but note that mod2_3's estimate is unreliable (n_eff = 11, Rhat = 1.17)!
With more iterations:
# Rerun mod2_3_otherpriors with 3000 iterations to check stability of
# the estimates.
fit.mod2_3_otherpriors_iter3000 <- sampling(mod2_3_otherpriors, data = data.list_mod2_2, iter = 3000, chains = 2, cores = 2, control=list(max_treedepth=14))
# Posterior summary for the longer run (same parameters as before).
print(fit.mod2_3_otherpriors_iter3000, pars = c("beta_age","beta_age2",
"alpha",
"alpha_prov","alpha_block",
"sigma_alpha_prov","sigma_alpha_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_3_otherpriors.
## 2 chains, each with iter=3000; warmup=1500; thin=1;
## post-warmup draws per chain=1500, total post-warmup draws=3000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0 0.02 0.44 0.46 0.48 2067 1
## beta_age2 -0.09 0 0.02 -0.11 -0.09 -0.07 1968 1
## alpha 5.85 0 0.08 5.77 5.85 5.93 657 1
## alpha_prov[1] 0.05 0 0.04 0.00 0.05 0.10 785 1
## alpha_prov[2] 0.01 0 0.04 -0.05 0.00 0.06 805 1
## alpha_prov[3] -0.07 0 0.05 -0.13 -0.06 -0.01 1030 1
## alpha_prov[4] 0.01 0 0.05 -0.05 0.01 0.07 886 1
## alpha_prov[5] 0.01 0 0.05 -0.04 0.01 0.07 813 1
## alpha_block[1] -0.05 0 0.07 -0.12 -0.04 0.02 671 1
## alpha_block[2] -0.02 0 0.07 -0.09 -0.02 0.04 674 1
## alpha_block[3] -0.01 0 0.07 -0.08 -0.01 0.06 654 1
## alpha_block[4] 0.08 0 0.07 0.01 0.08 0.15 681 1
## sigma_alpha_prov 0.08 0 0.05 0.03 0.06 0.13 1089 1
## sigma_alpha_block 0.11 0 0.09 0.04 0.08 0.20 989 1
## sigma_y 0.40 0 0.01 0.38 0.40 0.41 2533 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:12:24 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Compile and sample mod2_4: non-centered parameterization with
# standardized offsets z_prov and z_block (see the print() call below).
mod2_4 = stan_model("mod2_4.stan")
fit.mod2_4 <- sampling(mod2_4, data = data.list_mod2_2, iter = 2000, chains = 2, cores = 2, control=list(max_treedepth=14))
## Warning: There were 53 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summary for mod2_4: age effects, global intercept,
# standardized offsets and group-level SDs.
print(fit.mod2_4, pars = c("beta_age","beta_age2",
"alpha",
"z_prov","z_block",
"sigma_prov","sigma_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_4.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 1407 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 1391 1.00
## alpha 5.75 0.02 0.28 5.63 5.82 5.90 131 1.02
## z_prov[1] 0.84 0.02 0.62 0.10 0.82 1.62 1094 1.00
## z_prov[2] 0.12 0.02 0.64 -0.66 0.15 0.92 1066 1.00
## z_prov[3] -0.98 0.03 0.75 -1.97 -0.95 -0.04 791 1.00
## z_prov[4] 0.21 0.02 0.69 -0.66 0.22 1.07 808 1.00
## z_prov[5] 0.22 0.02 0.62 -0.56 0.23 0.99 941 1.00
## z_block[1] -0.35 0.03 0.71 -1.26 -0.33 0.57 477 1.00
## z_block[2] -0.06 0.03 0.63 -0.85 -0.06 0.73 606 1.00
## z_block[3] 0.10 0.02 0.61 -0.67 0.11 0.87 654 1.00
## z_block[4] 1.15 0.02 0.66 0.35 1.11 2.01 888 1.00
## sigma_prov 0.08 0.00 0.09 0.03 0.06 0.15 514 1.01
## sigma_block 0.17 0.03 0.28 0.04 0.09 0.31 119 1.02
## sigma_y 0.40 0.00 0.01 0.39 0.40 0.41 1292 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:15:14 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for mod2_4 (50 replicated datasets).
y_rep <- as.matrix(fit.mod2_4, pars = "y_rep")
ppc_dens_overlay(y =data$height,y_rep[1:50, ]) + theme_bw() + theme(legend.text=element_text(size=25),
legend.title=element_text(size=18),
axis.text = element_text(size=18),
legend.position = c(0.8,0.6))
# Trace plots with divergences marked, for the parameters that mix worst.
posterior_cp <- as.array(fit.mod2_4)
np_cp <- nuts_params(fit.mod2_4)
mcmc_trace(posterior_cp, pars = c("alpha", "sigma_block", "sigma_prov","z_block[3]"), np = np_cp) +
xlab("Post-warmup iteration")
# mod2_4 with alternative priors; this run sampled poorly (277
# divergences and large Rhat values, see the warnings/output below).
mod2_4_otherpriors = stan_model("mod2_4_otherpriors.stan")
fit.mod2_4_otherpriors <- sampling(mod2_4_otherpriors, data = data.list_mod2_2, iter = 2000, chains = 2, cores = 2, control=list(max_treedepth=14))
## Warning: There were 277 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: The largest R-hat is 1.18, indicating chains have not mixed.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#r-hat
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#bulk-ess
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Posterior summary for mod2_4_otherpriors (same parameters as mod2_4).
print(fit.mod2_4_otherpriors, pars = c("beta_age","beta_age2",
"alpha",
"z_prov","z_block",
"sigma_prov","sigma_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_4_otherpriors.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.47 0.00 0.02 0.44 0.47 0.49 16 1.12
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 21 1.09
## alpha 5.78 0.05 0.15 5.54 5.83 5.92 10 1.26
## z_prov[1] 0.94 0.21 0.72 0.04 0.86 2.01 12 1.16
## z_prov[2] 0.11 0.05 0.59 -0.67 0.18 0.76 114 1.03
## z_prov[3] -0.82 0.17 0.83 -1.93 -0.77 0.38 25 1.10
## z_prov[4] 0.24 0.08 0.68 -0.64 0.37 0.89 65 1.05
## z_prov[5] 0.24 0.05 0.58 -0.53 0.32 0.86 138 1.02
## z_block[1] -0.48 0.13 0.64 -1.32 -0.48 0.29 24 1.09
## z_block[2] -0.19 0.07 0.59 -0.94 -0.14 0.41 81 1.03
## z_block[3] -0.05 0.05 0.53 -0.77 -0.01 0.53 129 1.03
## z_block[4] 0.89 0.06 0.71 0.17 0.70 1.90 125 1.02
## sigma_prov 0.12 0.04 0.14 0.03 0.06 0.33 11 1.26
## sigma_block 0.20 0.10 0.23 0.04 0.10 0.69 5 1.49
## sigma_y 0.40 0.00 0.01 0.39 0.40 0.41 358 1.01
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:16:10 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for mod2_4_otherpriors.
y_rep <- as.matrix(fit.mod2_4_otherpriors, pars = "y_rep")
ppc_dens_overlay(y =data$height,y_rep[1:50, ]) + theme_bw() + theme(legend.text=element_text(size=25),
legend.title=element_text(size=18),
axis.text = element_text(size=18),
legend.position = c(0.8,0.6))
# Trace plots with divergences marked, to visualize the mixing problems.
posterior_cp <- as.array(fit.mod2_4_otherpriors)
np_cp <- nuts_params(fit.mod2_4_otherpriors)
mcmc_trace(posterior_cp, pars = c("alpha", "sigma_block", "sigma_prov","z_block[3]"), np = np_cp) +
xlab("Post-warmup iteration")
Let’s increase the target acceptance (adapt_delta=0.99)
McElreath (second edition): “[…] the target acceptance rate is controlled by the adapt_delta control parameter. The default is 0.95, which means that it aims to attain a 95% acceptance rate. It tries this during the warmup phase, adjusting the step size of each leapfrog step (go back to Chapter 9 if these terms aren’t familiar). When adapt_delta is set high, it results in a smaller step size, which means a more accurate approximation of the curved surface. It also means more computation, which means a slower chain. Increasing adapt_delta can often, but not always, help with divergent transitions.”
# Refit mod2_4_otherpriors with a higher target acceptance rate
# (adapt_delta=0.99) to remove the divergent transitions.
fit.mod2_4_otherpriors_adaptdelta <- sampling(mod2_4_otherpriors, data = data.list_mod2_2, iter = 2000, chains = 2, cores = 2, control=list(max_treedepth=14,adapt_delta=0.99))
# Posterior summary for the adapt_delta=0.99 run.
print(fit.mod2_4_otherpriors_adaptdelta, pars = c("beta_age","beta_age2",
"alpha",
"z_prov","z_block",
"sigma_prov","sigma_block",
"sigma_y"), probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod2_4_otherpriors.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.02 0.44 0.46 0.48 1559 1
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 1519 1
## alpha 5.85 0.00 0.09 5.76 5.85 5.94 674 1
## z_prov[1] 0.81 0.02 0.65 -0.01 0.79 1.64 793 1
## z_prov[2] 0.05 0.02 0.63 -0.75 0.04 0.83 855 1
## z_prov[3] -1.09 0.02 0.73 -2.05 -1.06 -0.15 923 1
## z_prov[4] 0.10 0.02 0.70 -0.75 0.08 0.98 1316 1
## z_prov[5] 0.16 0.02 0.65 -0.66 0.14 1.00 1036 1
## z_block[1] -0.58 0.02 0.64 -1.44 -0.57 0.22 758 1
## z_block[2] -0.27 0.02 0.63 -1.06 -0.25 0.51 895 1
## z_block[3] -0.10 0.02 0.60 -0.84 -0.09 0.64 812 1
## z_block[4] 1.02 0.03 0.73 0.12 0.99 1.98 837 1
## sigma_prov 0.08 0.00 0.07 0.03 0.06 0.13 606 1
## sigma_block 0.11 0.00 0.10 0.04 0.08 0.21 545 1
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1619 1
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:16:46 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
Longer to run, but it’s fine now! The estimates are also similar to those of mod2_3_otherpriors (the model with centered parameterization and more informative priors).
# Data list for the varying-slopes model (mod3_1); same structure as
# data.list_mod2_2.
data.list_mod3 <- with(data, list(
  N      = length(height),        # Number of observations
  y      = height,                # Response variable: tree height
  age    = age.sc,                # Standardized tree age
  nprov  = length(unique(prov)),  # Number of provenances
  nblock = length(unique(block)), # Number of blocks
  prov   = as.numeric(prov),      # Provenance index of each tree
  bloc   = as.numeric(block)      # Block index of each tree
))
mod3_1: In this model, I followed the example from the Stan code of Statistical Rethinking, section 13.3 (Example: cross-classified chimpanzees with varying slopes).
Even with a 99% target acceptance rate (adapt_delta=0.99) and 3000 iterations, the model had some divergent transitions and small effective sample sizes.
One thing I didn’t understand with this model code is: where are LKJ and \(\sigma_{\alpha_{BLOCK}}\) priors?
# Compile and sample mod3_1: varying intercepts and varying age slopes
# per provenance (with their correlation Rho_prov), plus block
# intercepts. Run with 3000 iterations and adapt_delta=0.99.
mod3_1 = stan_model("mod3_1.stan")
fit.mod3_1 <- sampling(mod3_1, data = data.list_mod3 , iter = 3000, chains = 2, cores = 2, control=list(max_treedepth=14,adapt_delta=0.99))
## Warning: There were 3 divergent transitions after warmup. Increasing adapt_delta above 0.99 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
# Full posterior summary for mod3_1 (no pars argument, so all
# parameters — including every y_rep element — are printed below).
print(fit.mod3_1, probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod3_1.
## 2 chains, each with iter=3000; warmup=1500; thin=1;
## post-warmup draws per chain=1500, total post-warmup draws=3000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.47 0.00 0.03 0.44 0.47 0.50 655 1.01
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 1775 1.00
## alpha 5.85 0.00 0.09 5.76 5.85 5.93 364 1.01
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1888 1.00
## sigma_block 0.11 0.00 0.11 0.04 0.08 0.20 623 1.00
## alpha_block[1] -0.05 0.00 0.07 -0.12 -0.04 0.02 330 1.01
## alpha_block[2] -0.02 0.00 0.07 -0.09 -0.02 0.04 328 1.01
## alpha_block[3] -0.01 0.00 0.07 -0.08 -0.01 0.06 346 1.01
## alpha_block[4] 0.08 0.00 0.07 0.01 0.08 0.15 358 1.01
## Rho_prov[1,1] 1.00 NaN 0.00 1.00 1.00 1.00 NaN NaN
## Rho_prov[1,2] -0.23 0.02 0.55 -0.90 -0.31 0.60 1098 1.00
## Rho_prov[2,1] -0.23 0.02 0.55 -0.90 -0.31 0.60 1098 1.00
## Rho_prov[2,2] 1.00 0.00 0.00 1.00 1.00 1.00 572 1.00
## sigma_prov[1] 0.09 0.00 0.07 0.03 0.07 0.15 558 1.00
## sigma_prov[2] 0.04 0.00 0.04 0.01 0.03 0.08 478 1.00
## alpha_prov[1] 0.05 0.00 0.05 0.00 0.05 0.11 561 1.00
## alpha_prov[2] 0.00 0.00 0.05 -0.05 0.00 0.06 578 1.00
## alpha_prov[3] -0.07 0.00 0.06 -0.14 -0.07 -0.01 602 1.00
## alpha_prov[4] 0.00 0.00 0.06 -0.05 0.00 0.07 642 1.00
## alpha_prov[5] 0.01 0.00 0.06 -0.04 0.01 0.07 622 1.00
## beta_prov[1] -0.02 0.00 0.03 -0.05 -0.01 0.01 615 1.01
## beta_prov[2] 0.00 0.00 0.03 -0.04 0.00 0.02 817 1.00
## beta_prov[3] 0.01 0.00 0.03 -0.02 0.01 0.05 974 1.00
## beta_prov[4] 0.02 0.00 0.03 -0.01 0.01 0.06 856 1.00
## beta_prov[5] -0.01 0.00 0.03 -0.05 -0.01 0.02 665 1.01
## v_prov[1,1] 0.05 0.00 0.05 0.00 0.05 0.11 561 1.00
## v_prov[1,2] -0.02 0.00 0.03 -0.05 -0.01 0.01 615 1.01
## v_prov[2,1] 0.00 0.00 0.05 -0.05 0.00 0.06 578 1.00
## v_prov[2,2] 0.00 0.00 0.03 -0.04 0.00 0.02 817 1.00
## v_prov[3,1] -0.07 0.00 0.06 -0.14 -0.07 -0.01 602 1.00
## v_prov[3,2] 0.01 0.00 0.03 -0.02 0.01 0.05 974 1.00
## v_prov[4,1] 0.00 0.00 0.06 -0.05 0.00 0.07 642 1.00
## v_prov[4,2] 0.02 0.00 0.03 -0.01 0.01 0.06 856 1.00
## v_prov[5,1] 0.01 0.00 0.06 -0.04 0.01 0.07 622 1.00
## v_prov[5,2] -0.01 0.00 0.03 -0.05 -0.01 0.02 665 1.01
## SRS_prov[1,1] 0.01 0.00 0.06 0.00 0.00 0.02 639 1.00
## SRS_prov[1,2] 0.00 0.00 0.01 0.00 0.00 0.00 1139 1.00
## SRS_prov[2,1] 0.00 0.00 0.01 0.00 0.00 0.00 1139 1.00
## SRS_prov[2,2] 0.00 0.00 0.01 0.00 0.00 0.01 704 1.00
## y_rep[1] 318.99 2.45 133.12 176.76 292.25 493.19 2961 1.00
## y_rep[2] 219.10 1.63 91.83 121.58 203.01 333.05 3156 1.00
## y_rep[3] 451.93 3.48 188.53 251.21 415.11 700.67 2933 1.00
## y_rep[4] 310.80 2.33 129.14 175.84 287.00 473.67 3064 1.00
## y_rep[5] 216.44 1.65 88.80 122.02 198.29 328.78 2913 1.00
## y_rep[6] 236.48 1.81 97.94 130.81 217.54 365.40 2929 1.00
## y_rep[7] 210.54 1.53 85.14 117.54 196.65 319.30 3096 1.00
## y_rep[8] 313.84 2.45 130.13 176.83 287.54 488.50 2832 1.00
## y_rep[9] 217.75 1.64 92.43 118.95 200.75 337.14 3187 1.00
## y_rep[10] 349.89 2.66 144.03 192.77 327.38 539.58 2935 1.00
## y_rep[11] 452.31 3.30 186.30 250.49 420.76 694.74 3189 1.00
## y_rep[12] 718.14 5.63 307.14 400.93 655.26 1109.42 2978 1.00
## y_rep[13] 324.29 2.42 133.11 181.33 303.50 502.20 3026 1.00
## y_rep[14] 219.23 1.68 89.90 121.55 202.68 337.43 2849 1.00
## y_rep[15] 317.72 2.60 134.06 176.18 293.70 486.94 2650 1.00
## y_rep[16] 520.04 4.08 219.71 285.56 480.24 805.54 2897 1.00
## y_rep[17] 628.25 5.15 267.22 351.33 574.18 983.32 2693 1.00
## y_rep[18] 211.14 1.56 87.32 116.92 195.11 326.18 3152 1.00
## y_rep[19] 236.57 1.93 99.36 130.26 217.46 366.83 2656 1.00
## y_rep[20] 217.01 1.62 90.55 120.61 200.13 332.60 3106 1.00
## y_rep[21] 646.16 4.99 272.18 357.61 600.56 999.58 2980 1.00
## y_rep[22] 359.18 2.68 146.04 198.58 333.78 549.70 2965 1.00
## y_rep[23] 209.11 1.61 88.54 114.33 192.06 330.89 3040 1.00
## y_rep[24] 236.95 1.86 100.29 130.51 217.02 364.48 2900 1.00
## y_rep[25] 325.49 2.37 135.97 181.08 297.77 508.28 3299 1.00
## y_rep[26] 454.59 3.56 197.07 246.73 417.03 713.34 3071 1.00
## y_rep[27] 312.41 2.44 129.23 175.49 287.59 477.27 2799 1.00
## y_rep[28] 476.36 3.83 199.26 264.56 438.67 728.30 2706 1.00
## y_rep[29] 524.10 3.95 220.56 289.01 480.49 815.51 3111 1.00
## y_rep[30] 711.82 5.82 299.89 385.81 651.58 1099.93 2656 1.00
## y_rep[31] 218.52 1.71 90.28 121.97 200.83 339.42 2796 1.00
## y_rep[32] 622.76 4.56 255.41 346.31 573.70 964.86 3139 1.00
## y_rep[33] 215.69 1.65 89.62 119.88 198.42 335.86 2942 1.00
## y_rep[34] 216.64 1.64 88.04 121.56 200.60 330.28 2894 1.00
## y_rep[35] 648.11 5.04 274.42 355.90 596.20 1014.97 2962 1.00
## y_rep[36] 625.61 5.12 259.78 341.91 578.00 968.14 2575 1.00
## y_rep[37] 512.03 4.00 212.99 290.37 469.67 786.98 2831 1.00
## y_rep[38] 312.07 2.32 127.21 176.99 288.02 477.73 3006 1.00
## y_rep[39] 352.24 2.86 147.43 195.15 323.78 542.98 2656 1.00
## y_rep[40] 633.86 4.82 267.32 341.51 587.57 978.47 3074 1.00
## y_rep[41] 469.48 3.52 191.59 264.47 437.47 714.18 2969 1.00
## y_rep[42] 322.33 2.31 130.57 180.54 298.82 494.56 3192 1.00
## y_rep[43] 459.78 3.56 193.10 251.98 420.69 716.99 2940 1.00
## y_rep[44] 209.77 1.53 85.56 117.88 193.45 321.56 3140 1.00
## y_rep[45] 238.07 1.81 98.86 131.70 220.23 368.56 2968 1.00
## y_rep[46] 696.90 5.05 277.67 374.51 654.63 1071.83 3019 1.00
## y_rep[47] 326.31 2.59 136.07 181.44 300.12 511.02 2765 1.00
## y_rep[48] 476.59 3.67 201.23 263.16 441.20 740.67 3001 1.00
## y_rep[49] 320.24 2.48 133.63 175.55 297.85 490.41 2913 1.00
## y_rep[50] 514.22 4.09 219.03 284.12 468.20 800.51 2870 1.00
## y_rep[51] 472.72 3.64 191.98 261.79 439.92 730.84 2789 1.00
## y_rep[52] 632.29 4.86 263.59 342.35 586.56 966.36 2936 1.00
## y_rep[53] 236.61 2.07 99.59 129.97 217.35 368.73 2310 1.00
## y_rep[54] 712.70 5.30 288.05 403.55 661.99 1087.94 2958 1.00
## y_rep[55] 215.46 1.69 90.05 119.87 199.87 331.06 2849 1.00
## y_rep[56] 306.65 2.26 124.20 172.55 285.35 469.40 3008 1.00
## y_rep[57] 215.31 1.61 88.74 118.93 199.44 328.84 3024 1.00
## y_rep[58] 209.24 1.65 90.68 114.60 192.42 321.68 3022 1.00
## y_rep[59] 357.61 2.71 149.54 200.39 327.95 551.35 3049 1.00
## y_rep[60] 657.92 5.36 275.91 364.88 612.42 1008.26 2649 1.00
## y_rep[61] 455.39 3.44 187.65 255.79 420.40 699.29 2969 1.00
## y_rep[62] 321.76 2.55 133.63 177.77 299.92 489.85 2750 1.00
## y_rep[63] 324.47 2.44 133.27 176.73 298.78 504.38 2977 1.00
## y_rep[64] 310.29 2.39 128.54 172.46 287.44 479.37 2893 1.00
## y_rep[65] 211.65 1.60 88.24 117.89 195.19 321.93 3028 1.00
## y_rep[66] 639.23 5.08 273.52 352.30 585.14 996.80 2901 1.00
## y_rep[67] 214.87 1.65 88.77 118.88 199.05 325.46 2884 1.00
## y_rep[68] 515.63 3.79 210.95 285.53 478.77 791.29 3096 1.00
## y_rep[69] 712.03 5.54 296.64 396.10 656.49 1094.66 2867 1.00
## y_rep[70] 466.39 3.55 193.32 257.05 429.92 715.43 2963 1.00
## y_rep[71] 457.64 3.48 193.33 253.94 419.67 703.25 3084 1.00
## y_rep[72] 220.43 1.71 92.85 122.44 203.40 338.82 2962 1.00
## y_rep[73] 235.82 1.71 94.25 132.57 220.64 354.33 3032 1.00
## y_rep[74] 354.50 2.60 145.42 196.05 329.20 544.08 3117 1.00
## y_rep[75] 620.21 4.59 257.26 344.13 574.08 950.34 3141 1.00
## y_rep[76] 322.11 2.35 131.06 180.73 296.79 494.43 3117 1.00
## y_rep[77] 453.07 3.55 190.65 257.57 413.84 701.10 2878 1.00
## y_rep[78] 208.68 1.55 86.89 114.31 193.10 325.13 3125 1.00
## y_rep[79] 236.56 1.78 98.31 133.37 218.65 358.02 3051 1.00
## y_rep[80] 307.41 2.29 126.82 168.53 287.12 472.68 3078 1.00
## y_rep[81] 221.94 1.72 92.98 122.51 204.45 346.75 2929 1.00
## y_rep[82] 215.96 1.63 90.46 120.66 196.57 333.50 3093 1.00
## y_rep[83] 524.20 3.99 216.56 291.43 483.12 815.32 2940 1.00
## y_rep[84] 635.68 4.86 262.28 354.93 588.33 974.36 2917 1.00
## y_rep[85] 321.47 2.41 134.63 180.63 293.96 504.18 3113 1.00
## y_rep[86] 478.19 3.94 202.02 260.97 441.48 739.24 2632 1.00
## y_rep[87] 707.61 5.51 307.52 392.63 648.13 1087.69 3120 1.00
## y_rep[88] 352.00 2.67 142.96 195.17 328.56 537.25 2862 1.00
## y_rep[89] 462.79 3.57 196.97 253.79 427.56 714.09 3050 1.00
## y_rep[90] 621.79 4.50 252.90 343.12 578.36 955.09 3158 1.00
## y_rep[91] 239.39 1.90 99.98 131.62 220.63 368.74 2760 1.00
## y_rep[92] 519.12 3.87 213.53 286.85 482.78 805.03 3039 1.00
## y_rep[93] 213.71 1.62 88.66 120.35 195.82 332.04 2998 1.00
## y_rep[94] 218.23 1.66 92.08 118.01 201.93 337.74 3081 1.00
## y_rep[95] 310.25 2.51 129.67 172.24 285.09 475.32 2664 1.00
## y_rep[96] 323.11 2.55 136.37 177.70 295.42 496.82 2868 1.00
## y_rep[97] 715.07 5.44 301.51 390.89 653.79 1122.67 3073 1.00
## y_rep[98] 354.24 2.63 142.77 198.44 327.47 545.24 2943 1.00
## y_rep[99] 216.79 1.58 88.11 120.03 201.27 326.51 3092 1.00
## y_rep[100] 354.95 2.89 150.13 194.81 327.79 542.28 2697 1.00
## y_rep[101] 628.67 4.67 255.84 347.98 579.27 959.84 3006 1.00
## y_rep[102] 475.13 3.49 194.16 262.76 441.66 728.80 3101 1.00
## y_rep[103] 641.26 4.79 263.34 358.46 590.24 989.08 3019 1.00
## y_rep[104] 326.24 2.59 136.46 180.92 300.16 512.38 2780 1.00
## y_rep[105] 209.78 1.53 82.01 115.83 195.91 319.33 2873 1.00
## y_rep[106] 309.43 2.29 124.72 170.55 287.31 474.94 2969 1.00
## y_rep[107] 724.30 5.44 301.00 401.14 669.09 1121.59 3060 1.00
## y_rep[108] 237.46 1.76 95.31 134.29 218.82 365.90 2931 1.00
## y_rep[109] 320.37 2.40 133.48 177.29 295.94 492.25 3097 1.00
## y_rep[110] 455.96 3.50 190.76 256.09 416.50 708.77 2969 1.00
## y_rep[111] 214.61 1.70 90.07 118.91 199.02 322.86 2809 1.00
## y_rep[112] 653.46 4.88 271.61 362.49 610.11 995.82 3099 1.00
## y_rep[113] 511.72 3.92 218.41 279.64 471.70 787.79 3101 1.00
## y_rep[114] 221.13 1.78 95.24 122.01 202.47 341.66 2849 1.00
## y_rep[115] 463.80 3.52 192.81 257.53 429.57 706.65 2994 1.00
## y_rep[116] 352.11 2.59 141.55 198.41 327.13 539.77 2994 1.00
## y_rep[117] 237.95 1.78 99.74 130.43 219.36 368.50 3123 1.00
## y_rep[118] 321.30 2.46 132.79 176.06 299.09 498.76 2905 1.00
## y_rep[119] 634.95 4.77 256.70 352.15 594.24 965.58 2899 1.00
## y_rep[120] 466.79 3.66 193.56 255.45 435.20 714.93 2802 1.00
## y_rep[121] 516.60 3.98 214.78 287.26 477.24 794.22 2917 1.00
## y_rep[122] 206.53 1.64 87.47 115.10 191.26 316.67 2854 1.00
## y_rep[123] 702.85 5.39 287.79 392.97 651.98 1089.21 2855 1.00
## y_rep[124] 312.32 2.35 128.84 176.10 288.44 480.71 3002 1.00
## y_rep[125] 214.82 1.66 91.88 117.44 197.00 330.99 3071 1.00
## y_rep[126] 219.83 1.67 90.55 122.56 202.39 340.90 2948 1.00
## y_rep[127] 237.13 1.86 99.87 130.23 219.35 363.00 2873 1.00
## y_rep[128] 699.96 5.20 289.58 391.02 650.13 1081.21 3103 1.00
## y_rep[129] 463.74 3.66 196.35 256.56 427.87 710.15 2885 1.00
## y_rep[130] 209.01 1.62 88.30 116.83 193.01 323.72 2972 1.00
## y_rep[131] 322.85 2.54 136.80 178.48 295.56 492.35 2906 1.00
## y_rep[132] 353.50 2.94 148.50 192.86 329.46 545.85 2553 1.00
## y_rep[133] 309.38 2.38 126.89 169.56 289.26 473.34 2842 1.00
## y_rep[134] 326.55 2.62 137.82 179.37 300.53 505.17 2761 1.00
## y_rep[135] 213.99 1.66 86.81 118.72 200.24 326.30 2746 1.00
## y_rep[136] 513.40 3.96 213.63 280.57 471.43 796.12 2903 1.00
## y_rep[137] 651.91 4.86 275.83 362.43 608.27 992.38 3226 1.00
## y_rep[138] 472.46 3.68 195.77 260.42 441.65 723.36 2834 1.00
## y_rep[139] 216.15 1.67 90.66 119.79 200.44 334.26 2955 1.00
## y_rep[140] 635.67 5.32 277.49 353.50 585.70 961.39 2721 1.00
## y_rep[141] 352.40 2.72 143.81 195.58 327.36 532.04 2786 1.00
## y_rep[142] 323.58 2.44 134.68 180.53 297.39 503.19 3041 1.00
## y_rep[143] 642.00 4.91 267.29 350.38 589.58 993.12 2964 1.00
## y_rep[144] 218.37 1.62 88.45 122.89 203.19 333.26 2988 1.00
## y_rep[145] 210.16 1.64 87.16 115.52 193.80 328.81 2817 1.00
## y_rep[146] 318.30 2.45 131.01 179.00 295.76 486.03 2859 1.00
## y_rep[147] 452.97 3.38 186.17 247.74 423.19 697.21 3039 1.00
## y_rep[148] 237.61 1.77 95.54 131.62 221.79 364.30 2924 1.00
## y_rep[149] 706.15 5.63 284.94 395.01 650.28 1075.23 2557 1.00
## y_rep[150] 313.80 2.37 126.89 177.00 290.47 481.97 2856 1.00
## y_rep[151] 463.88 3.84 192.31 257.99 429.38 715.14 2507 1.00
## y_rep[152] 214.97 1.74 88.18 120.67 197.56 334.98 2562 1.00
## y_rep[153] 625.28 4.79 268.66 341.18 574.83 963.38 3150 1.00
## y_rep[154] 522.18 4.02 218.14 287.69 481.66 815.54 2939 1.00
## y_rep[155] 353.03 2.61 144.20 198.14 327.42 543.48 3053 1.00
## y_rep[156] 216.56 1.66 90.16 120.15 200.45 333.50 2954 1.00
## y_rep[157] 649.53 5.12 266.30 361.11 602.13 998.70 2705 1.00
## y_rep[158] 219.47 1.69 91.53 123.57 202.16 334.19 2938 1.00
## y_rep[159] 712.49 5.42 297.54 395.19 662.12 1096.26 3019 1.00
## y_rep[160] 311.41 2.39 129.89 175.81 283.62 480.79 2950 1.00
## y_rep[161] 457.36 3.51 188.56 253.36 420.73 701.69 2888 1.00
## y_rep[162] 466.79 3.41 189.19 260.91 434.68 706.28 3084 1.00
## y_rep[163] 237.45 1.81 99.40 129.75 218.70 363.20 3029 1.00
## y_rep[164] 623.11 4.86 260.25 344.09 574.85 961.38 2864 1.00
## y_rep[165] 519.00 4.03 218.21 291.84 477.32 799.45 2929 1.00
## y_rep[166] 324.04 2.44 133.09 181.28 299.22 501.50 2982 1.00
## y_rep[167] 210.43 1.65 87.68 116.59 193.81 325.03 2815 1.00
## y_rep[168] 319.39 2.45 134.15 176.65 292.45 491.33 2988 1.00
## y_rep[169] 209.32 1.56 86.75 115.43 195.56 320.24 3084 1.00
## y_rep[170] 218.36 1.75 93.15 120.15 198.95 344.28 2842 1.00
## y_rep[171] 641.76 4.96 263.36 358.94 592.44 976.29 2825 1.00
## y_rep[172] 656.20 5.18 275.89 361.49 598.73 1026.20 2840 1.00
## y_rep[173] 215.09 1.75 92.19 117.74 198.38 331.53 2762 1.00
## y_rep[174] 240.58 1.89 102.38 132.09 220.26 374.62 2937 1.00
## y_rep[175] 470.37 3.57 193.57 262.30 435.49 724.12 2941 1.00
## y_rep[176] 325.15 2.43 138.45 177.58 298.85 499.69 3236 1.00
## y_rep[177] 312.61 2.40 128.87 175.07 287.15 476.46 2886 1.00
## y_rep[178] 315.35 2.32 130.15 176.46 291.99 485.05 3143 1.00
## y_rep[179] 469.54 3.53 197.11 259.50 435.49 719.19 3119 1.00
## y_rep[180] 354.84 2.77 149.65 194.76 326.75 541.40 2918 1.00
## y_rep[181] 214.19 1.62 87.20 119.90 198.58 329.54 2888 1.00
## y_rep[182] 647.51 4.84 257.54 364.64 601.43 979.18 2835 1.00
## y_rep[183] 468.38 3.49 194.49 260.39 433.07 728.77 3112 1.00
## y_rep[184] 218.60 1.66 90.07 123.88 200.59 340.92 2951 1.00
## y_rep[185] 237.78 1.78 97.77 134.51 220.66 367.52 3022 1.00
## y_rep[186] 325.49 2.39 133.69 180.57 304.13 495.24 3124 1.00
## y_rep[187] 211.37 1.59 88.39 117.62 196.62 324.71 3094 1.00
## y_rep[188] 356.12 2.62 149.33 194.92 329.97 546.11 3238 1.00
## y_rep[189] 235.17 1.81 95.60 131.05 217.69 366.77 2796 1.00
## y_rep[190] 210.55 1.53 85.43 118.03 195.82 321.62 3101 1.00
## y_rep[191] 654.92 5.17 276.98 358.79 605.33 1016.49 2875 1.00
## y_rep[192] 326.07 2.58 135.56 178.16 303.25 502.58 2758 1.00
## y_rep[193] 314.21 2.44 129.55 175.12 289.51 483.65 2827 1.00
## y_rep[194] 633.44 4.69 257.97 354.28 586.38 978.10 3020 1.00
## y_rep[195] 215.72 1.64 88.50 120.94 198.70 331.50 2930 1.00
## y_rep[196] 317.21 2.40 131.15 178.97 294.09 482.05 2976 1.00
## y_rep[197] 216.14 1.66 91.59 120.62 198.45 335.74 3045 1.00
## y_rep[198] 474.02 3.70 195.48 261.27 439.76 728.66 2788 1.00
## y_rep[199] 707.04 5.23 296.12 387.01 653.06 1098.88 3202 1.00
## y_rep[200] 520.30 4.19 223.71 285.29 477.34 806.91 2846 1.00
## y_rep[201] 472.09 3.56 194.13 263.29 436.26 725.68 2965 1.00
## y_rep[202] 356.27 2.70 149.04 198.08 326.01 559.82 3043 1.00
## y_rep[203] 326.85 2.52 140.01 180.60 302.60 505.36 3087 1.00
## y_rep[204] 457.14 3.45 192.35 252.66 419.57 709.61 3113 1.00
## y_rep[205] 238.87 1.82 100.20 131.49 219.67 369.11 3040 1.00
## y_rep[206] 465.46 3.55 193.94 257.11 428.07 721.11 2989 1.00
## y_rep[207] 354.92 2.91 155.88 194.14 324.09 559.18 2876 1.00
## y_rep[208] 217.87 1.71 89.48 119.48 200.13 335.48 2746 1.00
## y_rep[209] 317.02 2.35 132.77 177.55 291.99 491.27 3202 1.00
## y_rep[210] 641.20 4.67 258.98 359.79 597.57 966.33 3074 1.00
## y_rep[211] 651.18 5.28 271.81 363.38 600.60 993.27 2650 1.00
## y_rep[212] 513.46 3.88 207.22 286.56 477.01 775.02 2857 1.00
## y_rep[213] 218.80 1.66 91.39 120.05 202.30 340.30 3040 1.00
## y_rep[214] 474.51 3.83 194.87 260.80 443.26 723.97 2594 1.00
## y_rep[215] 208.22 1.54 83.66 116.09 192.98 318.30 2943 1.00
## y_rep[216] 315.03 2.66 131.09 175.52 288.92 493.12 2433 1.00
## y_rep[217] 624.48 4.89 267.19 340.14 574.47 978.70 2989 1.00
## y_rep[218] 631.34 4.77 260.84 346.21 582.86 984.17 2994 1.00
## y_rep[219] 208.40 1.61 87.07 113.09 192.84 322.02 2907 1.00
## y_rep[220] 468.74 3.50 193.80 260.31 437.50 714.42 3062 1.00
## y_rep[221] 318.43 2.52 128.63 181.67 294.14 479.03 2608 1.00
## y_rep[222] 219.83 1.66 91.99 122.85 203.45 338.97 3088 1.00
## y_rep[223] 310.71 2.39 129.17 174.20 286.70 472.92 2921 1.00
## y_rep[224] 353.05 2.57 141.67 199.11 330.67 539.87 3035 1.00
## y_rep[225] 455.45 3.60 193.11 252.47 418.41 695.55 2871 1.00
## y_rep[226] 214.39 1.60 88.94 120.82 197.71 330.04 3080 1.00
## y_rep[227] 241.48 1.80 98.89 132.55 224.38 374.76 3029 1.00
## y_rep[228] 469.42 3.59 194.49 259.82 432.54 720.41 2928 1.00
## y_rep[229] 639.16 4.67 262.21 358.13 589.82 978.31 3148 1.00
## y_rep[230] 322.67 2.63 134.80 177.54 299.55 494.78 2634 1.00
## y_rep[231] 655.20 5.19 271.79 365.95 602.02 1012.75 2743 1.00
## y_rep[232] 322.50 2.51 137.43 176.18 296.43 501.85 3003 1.00
## y_rep[233] 516.70 3.88 215.47 280.85 479.11 788.59 3084 1.00
## y_rep[234] 211.08 1.64 86.95 116.09 196.12 326.85 2816 1.00
## y_rep[235] 214.93 1.70 88.30 119.75 199.54 327.59 2695 1.00
## y_rep[236] 468.93 3.68 198.89 263.69 431.19 715.04 2917 1.00
## y_rep[237] 217.30 1.68 90.08 120.66 199.24 330.80 2861 1.00
## y_rep[238] 234.22 1.76 94.80 130.95 217.34 358.49 2886 1.00
## y_rep[239] 311.64 2.32 127.47 173.26 286.91 483.17 3030 1.00
## y_rep[240] 354.25 2.55 142.95 196.33 329.40 546.38 3152 1.00
## y_rep[241] 629.64 4.76 263.11 347.15 580.67 954.02 3057 1.00
## y_rep[242] 453.07 3.32 186.90 250.68 418.36 693.37 3166 1.00
## y_rep[243] 324.62 2.76 134.28 177.73 301.31 500.60 2360 1.00
## y_rep[244] 628.99 4.71 254.20 348.33 583.64 954.28 2908 1.00
## y_rep[245] 712.79 5.35 291.48 397.96 662.15 1077.96 2967 1.00
## y_rep[246] 516.81 4.12 216.58 282.25 479.53 795.31 2767 1.00
## y_rep[247] 318.14 2.44 131.39 180.96 292.55 488.47 2910 1.00
## y_rep[248] 704.39 5.39 287.37 384.10 654.23 1085.11 2844 1.00
## y_rep[249] 213.48 1.59 88.59 117.11 196.69 333.55 3091 1.00
## y_rep[250] 645.79 4.63 263.29 355.75 594.94 989.77 3238 1.00
## y_rep[251] 212.02 1.52 86.01 118.55 196.71 326.06 3199 1.00
## y_rep[252] 324.88 2.50 136.15 182.77 298.21 503.97 2972 1.00
## y_rep[253] 472.69 3.57 197.87 262.08 436.03 732.56 3073 1.00
## y_rep[254] 472.52 3.66 199.82 262.19 434.43 722.71 2976 1.00
## y_rep[255] 314.82 2.42 128.36 173.64 293.02 485.87 2817 1.00
## y_rep[256] 216.00 1.62 89.06 117.89 200.04 333.93 3007 1.00
## y_rep[257] 646.35 4.97 267.33 355.62 598.65 996.91 2890 1.00
## y_rep[258] 633.96 4.92 270.14 351.55 584.38 979.84 3017 1.00
## y_rep[259] 238.26 1.82 99.61 133.83 219.73 366.45 3008 1.00
## y_rep[260] 454.37 3.62 190.13 250.40 423.91 688.14 2751 1.00
## y_rep[261] 351.62 2.63 144.26 194.36 325.60 539.10 3004 1.00
## y_rep[262] 461.34 3.49 188.16 257.30 427.50 706.93 2914 1.00
## y_rep[263] 639.46 4.96 267.19 351.18 588.60 1007.31 2906 1.00
## y_rep[264] 207.56 1.58 85.32 115.40 191.87 316.82 2899 1.00
## y_rep[265] 353.71 2.65 142.67 199.05 325.89 545.67 2888 1.00
## y_rep[266] 214.92 1.72 88.07 119.74 199.87 327.67 2637 1.00
## y_rep[267] 477.44 3.73 202.72 261.35 438.94 743.21 2947 1.00
## y_rep[268] 521.41 4.14 215.67 287.77 483.40 796.13 2719 1.00
## y_rep[269] 315.10 2.30 127.07 177.57 291.43 478.42 3065 1.00
## y_rep[270] 237.29 1.92 98.07 131.29 217.99 364.31 2610 1.00
## y_rep[271] 699.60 5.34 287.87 379.60 650.96 1071.19 2902 1.00
## y_rep[272] 216.93 1.73 88.57 122.45 200.16 333.13 2629 1.00
## y_rep[273] 641.70 5.03 268.21 353.19 595.23 993.72 2840 1.00
## y_rep[274] 319.21 2.39 130.22 178.37 297.01 490.42 2963 1.00
## y_rep[275] 324.24 2.36 133.25 178.54 304.39 499.33 3193 1.00
## y_rep[276] 209.48 1.73 86.05 115.49 193.37 325.00 2486 1.00
## y_rep[277] 352.87 2.79 146.93 193.20 326.26 542.10 2774 1.00
## y_rep[278] 238.64 1.82 101.94 131.15 218.00 372.93 3141 1.00
## y_rep[279] 629.37 4.87 272.02 348.08 576.66 975.73 3122 1.00
## y_rep[280] 217.20 1.64 90.32 121.16 200.52 334.31 3036 1.00
## y_rep[281] 312.66 2.29 128.93 174.51 289.61 479.00 3175 1.00
## y_rep[282] 327.99 2.60 133.48 182.09 305.50 502.06 2629 1.00
## y_rep[283] 322.74 2.43 133.64 181.09 297.43 495.09 3021 1.00
## y_rep[284] 458.41 3.52 192.80 258.09 420.40 698.94 3008 1.00
## y_rep[285] 215.30 1.65 87.67 119.40 198.74 327.36 2810 1.00
## y_rep[286] 712.63 5.95 295.10 394.44 661.31 1090.81 2461 1.00
## y_rep[287] 518.48 4.04 212.51 289.14 475.64 802.55 2766 1.00
## y_rep[288] 453.83 3.60 188.96 253.96 415.57 699.84 2762 1.00
## y_rep[289] 462.72 3.77 193.91 252.56 424.72 721.39 2645 1.00
## y_rep[290] 218.30 1.64 88.24 121.67 203.09 327.88 2886 1.00
## y_rep[291] 469.10 3.70 196.42 254.61 435.82 716.58 2818 1.00
## y_rep[292] 630.70 5.02 266.35 350.13 581.81 977.10 2810 1.00
## y_rep[293] 643.53 4.92 265.54 350.05 594.86 994.49 2909 1.00
## y_rep[294] 210.26 1.65 86.57 116.66 194.10 326.59 2753 1.00
## y_rep[295] 318.29 2.36 129.28 179.76 295.20 484.48 2996 1.00
## y_rep[296] 314.43 2.49 129.07 174.69 292.22 478.49 2679 1.00
## y_rep[297] 216.99 1.70 90.85 120.02 199.61 335.06 2870 1.00
## y_rep[298] 518.71 4.16 220.74 286.36 478.47 797.28 2810 1.00
## y_rep[299] 653.64 5.89 279.82 355.45 598.15 1021.03 2258 1.00
## y_rep[300] 323.04 2.53 135.84 175.89 296.88 496.35 2892 1.00
## y_rep[301] 710.33 5.65 304.46 382.56 656.24 1096.85 2908 1.00
## y_rep[302] 351.59 2.66 145.74 198.11 323.54 534.98 3004 1.00
## y_rep[303] 237.51 1.82 98.06 130.60 220.22 366.95 2916 1.00
## y_rep[304] 203.10 1.53 82.48 113.72 188.34 312.95 2922 1.00
## y_rep[305] 304.66 2.29 127.20 169.24 281.19 465.75 3089 1.00
## y_rep[306] 207.79 1.56 88.16 114.25 192.01 321.31 3191 1.00
## y_rep[307] 297.79 2.30 124.24 164.53 271.48 462.97 2907 1.00
## y_rep[308] 226.67 1.72 94.27 124.31 210.55 352.65 2989 1.00
## y_rep[309] 311.19 2.44 130.90 172.76 284.77 486.35 2872 1.00
## y_rep[310] 453.04 3.31 183.32 255.87 418.69 692.45 3059 1.00
## y_rep[311] 198.26 1.50 81.14 109.87 185.78 300.56 2908 1.00
## y_rep[312] 312.89 2.30 129.53 173.44 290.51 481.08 3166 1.00
## y_rep[313] 226.14 1.76 95.10 121.71 209.72 349.05 2916 1.00
## y_rep[314] 456.30 3.38 183.24 257.55 424.34 694.07 2947 1.00
## y_rep[315] 207.02 1.64 89.26 115.85 188.85 321.34 2946 1.00
## y_rep[316] 196.30 1.48 80.41 109.76 182.11 298.11 2955 1.00
## y_rep[317] 436.49 3.31 182.76 240.40 402.54 669.82 3048 1.00
## y_rep[318] 203.45 1.52 83.36 115.64 189.43 307.91 3005 1.00
## y_rep[319] 307.50 2.47 126.23 166.90 284.88 472.30 2621 1.00
## y_rep[320] 295.78 2.29 122.16 160.97 274.43 454.65 2834 1.00
## y_rep[321] 632.53 5.11 261.33 352.01 588.01 961.38 2617 1.00
## y_rep[322] 609.32 4.95 255.37 331.88 566.76 942.57 2665 1.00
## y_rep[323] 202.28 1.55 85.55 111.25 186.22 308.88 3058 1.00
## y_rep[324] 694.52 5.46 299.36 375.56 640.22 1082.80 3001 1.00
## y_rep[325] 204.76 1.48 81.07 115.31 191.15 315.50 2989 1.00
## y_rep[326] 227.63 1.83 96.66 124.38 210.22 352.12 2802 1.00
## y_rep[327] 295.40 2.24 121.35 165.15 271.59 451.90 2942 1.00
## y_rep[328] 310.11 2.32 126.60 174.63 286.67 466.44 2978 1.00
## y_rep[329] 304.21 2.37 123.16 170.10 282.41 464.54 2696 1.00
## y_rep[330] 447.34 3.46 185.39 253.27 411.40 690.59 2864 1.00
## y_rep[331] 621.57 4.65 255.26 351.98 573.87 946.00 3007 1.00
## y_rep[332] 200.49 1.57 86.33 111.25 184.51 306.35 3021 1.00
## y_rep[333] 339.83 2.68 144.14 188.11 312.56 519.27 2893 1.00
## y_rep[334] 497.11 3.73 199.43 278.71 465.48 764.55 2852 1.00
## y_rep[335] 501.25 3.87 210.69 275.60 465.19 770.42 2971 1.00
## y_rep[336] 638.55 4.92 264.32 353.97 591.50 980.38 2892 1.00
## y_rep[337] 695.32 5.65 290.68 382.78 642.22 1060.80 2651 1.00
## y_rep[338] 207.49 1.63 84.77 115.09 191.63 319.00 2716 1.00
## y_rep[339] 308.50 2.20 124.66 173.37 287.81 469.00 3220 1.00
## y_rep[340] 198.32 1.49 82.59 110.54 183.69 308.22 3093 1.00
## y_rep[341] 204.23 1.58 83.26 114.41 189.36 312.59 2787 1.00
## y_rep[342] 224.66 1.66 93.16 123.73 207.63 347.56 3144 1.00
## y_rep[343] 336.71 2.52 140.31 187.46 311.12 516.45 3088 1.00
## y_rep[344] 454.27 3.24 181.37 255.33 421.72 688.02 3134 1.00
## y_rep[345] 200.27 1.51 82.01 112.06 185.94 303.64 2942 1.00
## y_rep[346] 308.39 2.46 136.79 168.96 283.24 478.29 3083 1.00
## y_rep[347] 205.68 1.55 83.89 114.98 192.52 314.66 2923 1.00
## y_rep[348] 311.64 2.46 132.23 172.21 286.16 481.94 2880 1.00
## y_rep[349] 629.45 5.08 264.16 350.20 579.99 991.77 2705 1.00
## y_rep[350] 296.86 2.21 123.33 164.06 273.81 457.24 3126 1.00
## y_rep[351] 448.43 3.61 191.63 251.65 413.81 686.21 2824 1.00
## y_rep[352] 496.89 3.72 205.44 274.68 459.05 757.91 3055 1.00
## y_rep[353] 332.21 2.60 137.94 182.31 306.34 516.83 2823 1.00
## y_rep[354] 202.46 1.56 82.52 112.05 188.43 309.55 2791 1.00
## y_rep[355] 700.85 5.54 302.67 387.09 641.04 1087.52 2982 1.00
## y_rep[356] 633.15 4.68 262.62 352.45 584.39 981.26 3152 1.00
## y_rep[357] 224.39 1.64 90.58 126.14 208.12 341.97 3040 1.00
## y_rep[358] 459.36 3.40 189.45 253.75 425.61 704.03 3114 1.00
## y_rep[359] 639.97 4.78 262.76 360.72 590.59 983.02 3017 1.00
## y_rep[360] 447.91 3.76 189.12 246.58 411.78 688.75 2536 1.00
## y_rep[361] 226.66 1.84 93.67 124.20 210.41 353.36 2580 1.00
## y_rep[362] 499.35 3.92 207.93 280.31 458.68 765.97 2815 1.00
## y_rep[363] 309.23 2.43 131.30 169.92 285.28 475.74 2909 1.00
## y_rep[364] 204.36 1.54 83.34 116.48 188.49 313.08 2911 1.00
## y_rep[365] 297.77 2.24 125.40 160.05 276.70 463.75 3129 1.00
## y_rep[366] 202.15 1.52 84.99 110.72 186.75 313.78 3133 1.00
## y_rep[367] 699.81 5.23 288.18 389.79 646.55 1077.87 3031 1.00
## y_rep[368] 632.39 4.81 260.99 354.64 587.50 973.53 2947 1.00
## y_rep[369] 199.62 1.45 80.59 109.97 186.58 304.59 3091 1.00
## y_rep[370] 341.04 2.67 143.19 191.69 311.86 526.51 2881 1.00
## y_rep[371] 456.60 3.53 191.59 252.95 422.94 690.65 2949 1.00
## y_rep[372] 305.52 2.22 122.54 170.04 283.37 467.87 3043 1.00
## y_rep[373] 202.07 1.54 82.38 113.92 186.20 312.04 2873 1.00
## y_rep[374] 625.43 4.71 262.32 345.95 575.37 971.27 3108 1.00
## y_rep[375] 605.99 5.43 252.99 336.38 555.01 938.67 2173 1.00
## y_rep[376] 203.06 1.50 86.86 112.67 184.85 313.06 3357 1.00
## y_rep[377] 692.11 5.04 276.07 393.17 638.23 1057.09 3004 1.00
## y_rep[378] 199.37 1.53 82.14 109.86 183.97 310.11 2866 1.00
## y_rep[379] 452.17 3.40 186.01 252.91 417.75 697.10 2990 1.00
## y_rep[380] 297.60 2.27 126.96 163.53 272.57 466.29 3140 1.00
## y_rep[381] 309.29 2.35 128.85 170.94 286.17 474.69 3015 1.00
## y_rep[382] 502.23 3.88 210.39 276.70 462.68 776.21 2934 1.00
## y_rep[383] 441.97 3.18 180.75 243.53 410.71 671.27 3222 1.00
## y_rep[384] 335.49 2.55 139.82 185.40 305.79 524.69 2996 1.00
## y_rep[385] 224.06 1.76 92.10 125.04 208.08 346.99 2732 1.00
## y_rep[386] 300.25 2.28 125.62 162.68 279.10 463.27 3028 1.00
## y_rep[387] 295.51 2.22 121.70 163.26 272.49 455.97 2996 1.00
## y_rep[388] 227.69 1.76 95.31 125.57 210.15 352.76 2922 1.00
## y_rep[389] 602.71 4.35 238.55 342.09 562.24 920.35 3011 1.00
## y_rep[390] 634.12 4.59 265.17 353.80 583.52 974.71 3331 1.00
## y_rep[391] 499.89 3.67 201.40 282.20 461.40 761.44 3008 1.00
## y_rep[392] 308.80 2.39 128.08 171.01 285.70 480.57 2879 1.00
## y_rep[393] 452.43 3.31 179.98 252.78 423.41 694.04 2961 1.00
## y_rep[394] 439.14 3.33 182.09 244.05 407.13 674.66 2988 1.00
## y_rep[395] 204.44 1.56 84.65 113.87 187.54 319.94 2938 1.00
## y_rep[396] 198.26 1.61 83.53 108.79 183.17 303.54 2697 1.00
## y_rep[397] 206.15 1.60 86.59 113.36 191.20 317.36 2944 1.00
## y_rep[398] 336.90 2.45 136.84 187.40 315.67 513.05 3116 1.00
## y_rep[399] 225.41 1.75 95.57 123.27 208.05 345.24 2984 1.00
## y_rep[400] 196.73 1.43 79.56 109.31 183.55 302.24 3101 1.00
## y_rep[401] 686.13 5.49 285.17 379.77 623.90 1064.71 2696 1.00
## y_rep[402] 199.44 1.47 80.69 112.03 185.15 303.37 3017 1.00
## y_rep[403] 504.46 3.74 210.12 279.62 465.35 766.02 3161 1.00
## y_rep[404] 206.17 1.60 87.57 113.51 189.52 318.43 3012 1.00
## y_rep[405] 442.67 3.57 188.80 245.15 409.98 675.94 2799 1.00
## y_rep[406] 605.28 4.47 253.74 338.74 552.10 950.24 3220 1.00
## y_rep[407] 334.55 2.73 141.52 182.93 307.99 515.71 2693 1.00
## y_rep[408] 294.90 2.20 119.31 166.58 275.05 446.68 2933 1.00
## y_rep[409] 302.27 2.21 121.71 171.60 280.09 465.75 3020 1.00
## y_rep[410] 308.21 2.45 132.92 169.76 281.97 474.87 2938 1.00
## y_rep[411] 197.17 1.56 82.88 110.35 180.85 304.87 2838 1.00
## y_rep[412] 630.89 4.76 263.55 349.41 583.87 975.59 3072 1.00
## y_rep[413] 303.28 2.21 122.64 171.54 279.92 466.18 3076 1.00
## y_rep[414] 450.16 3.49 193.69 241.12 417.61 692.13 3083 1.00
## y_rep[415] 223.96 1.67 92.94 123.76 207.12 345.87 3083 1.00
## y_rep[416] 336.49 2.43 137.85 189.70 309.70 512.84 3223 1.00
## y_rep[417] 297.44 2.28 123.42 162.74 273.61 459.73 2928 1.00
## y_rep[418] 627.85 4.79 258.92 347.95 578.54 965.38 2919 1.00
## y_rep[419] 203.15 1.59 86.00 111.02 187.86 309.42 2924 1.00
## y_rep[420] 462.27 3.50 189.90 250.91 429.84 707.29 2949 1.00
## y_rep[421] 205.58 1.57 85.34 114.35 189.17 316.22 2937 1.00
## y_rep[422] 333.95 2.43 135.48 187.40 307.91 509.36 3112 1.00
## y_rep[423] 451.84 3.46 191.19 250.39 415.24 698.30 3048 1.00
## y_rep[424] 306.68 2.37 129.47 168.26 281.00 473.62 2993 1.00
## y_rep[425] 301.00 2.32 127.11 162.57 278.65 466.18 2999 1.00
## y_rep[426] 205.22 1.56 85.15 114.87 189.35 314.94 2963 1.00
## y_rep[427] 491.07 3.75 201.47 273.33 456.03 746.14 2884 1.00
## y_rep[428] 304.02 2.17 122.05 167.65 279.22 469.71 3161 1.00
## y_rep[429] 225.38 1.80 94.63 123.86 208.57 350.37 2752 1.00
## y_rep[430] 690.39 5.34 283.84 384.20 633.93 1056.23 2829 1.00
## y_rep[431] 197.66 1.58 80.08 110.11 184.19 304.03 2566 1.00
## y_rep[432] 455.39 3.44 191.33 247.06 418.49 703.09 3095 1.00
## y_rep[433] 625.49 5.04 269.31 336.78 577.11 964.94 2860 1.00
## y_rep[434] 202.65 1.60 86.67 112.38 185.58 314.81 2950 1.00
## y_rep[435] 633.07 4.66 258.92 345.53 586.20 972.14 3086 1.00
## y_rep[436] 296.57 2.46 122.55 163.89 274.08 455.48 2488 1.00
## y_rep[437] 496.93 3.79 201.73 274.21 460.66 770.39 2827 1.00
## y_rep[438] 198.45 1.64 83.04 109.36 182.78 308.01 2558 1.00
## y_rep[439] 222.47 1.62 90.24 125.22 205.88 339.71 3111 1.00
## y_rep[440] 205.48 1.59 86.70 114.19 190.05 313.13 2982 1.00
## y_rep[441] 700.17 5.55 295.14 382.37 645.66 1086.64 2826 1.00
## y_rep[442] 337.46 2.64 142.58 188.01 309.16 520.52 2907 1.00
## y_rep[443] 204.97 1.55 85.11 114.26 188.92 318.15 3013 1.00
## y_rep[444] 225.31 1.74 96.59 122.98 207.24 346.69 3068 1.00
## y_rep[445] 612.81 4.70 252.87 339.45 563.99 937.22 2889 1.00
## y_rep[446] 639.04 4.69 261.04 358.15 594.80 971.00 3093 1.00
## y_rep[447] 437.11 3.36 185.97 239.52 399.71 672.30 3072 1.00
## y_rep[448] 694.94 5.49 295.24 383.75 637.72 1082.04 2888 1.00
## y_rep[449] 502.09 3.92 218.52 275.52 460.39 779.91 3101 1.00
## y_rep[450] 293.60 2.25 120.78 162.51 272.23 450.87 2894 1.00
## y_rep[451] 307.75 2.27 123.61 174.93 286.22 468.55 2975 1.00
## y_rep[452] 454.64 3.61 189.24 250.00 422.35 690.36 2746 1.00
## y_rep[453] 208.35 1.55 86.34 115.66 192.72 318.20 3123 1.00
## y_rep[454] 304.05 2.24 123.25 170.32 281.46 470.71 3020 1.00
## y_rep[455] 201.68 1.56 83.52 113.05 186.81 305.22 2851 1.00
## y_rep[456] 199.12 1.52 83.44 109.65 183.40 304.25 3012 1.00
## y_rep[457] 337.38 2.49 138.71 184.20 314.29 517.65 3111 1.00
## y_rep[458] 454.39 3.43 188.59 255.22 420.45 696.81 3021 1.00
## y_rep[459] 197.77 1.53 83.04 107.75 184.24 299.05 2946 1.00
## y_rep[460] 204.99 1.50 82.91 115.97 189.64 310.99 3050 1.00
## y_rep[461] 301.57 2.46 125.98 163.22 280.97 465.10 2615 1.00
## y_rep[462] 224.43 1.73 93.31 123.45 208.02 347.42 2901 1.00
## y_rep[463] 204.25 1.62 84.16 114.36 188.01 317.67 2687 1.00
## y_rep[464] 306.24 2.39 127.52 169.50 283.58 467.19 2849 1.00
## y_rep[465] 201.72 1.51 82.17 111.93 185.89 313.58 2946 1.00
## y_rep[466] 198.99 1.57 84.61 108.70 183.89 306.50 2920 1.00
## y_rep[467] 223.59 1.67 90.86 125.51 207.14 344.49 2966 1.00
## y_rep[468] 491.49 3.75 202.99 276.97 453.94 747.10 2934 1.00
## y_rep[469] 307.52 2.30 126.21 172.62 285.30 468.76 3013 1.00
## y_rep[470] 299.37 2.27 123.35 165.30 277.42 459.39 2944 1.00
## y_rep[471] 334.52 2.53 138.51 183.30 309.32 514.15 2993 1.00
## y_rep[472] 697.09 5.24 288.10 392.29 642.64 1071.21 3020 1.00
## y_rep[473] 644.30 5.02 270.27 356.83 592.25 995.33 2897 1.00
## y_rep[474] 451.92 3.42 190.32 251.01 418.94 688.21 3092 1.00
## y_rep[475] 205.58 1.60 86.15 114.79 188.04 319.88 2903 1.00
## y_rep[476] 453.44 3.36 187.58 252.79 421.84 698.08 3119 1.00
## y_rep[477] 629.11 4.94 266.46 351.14 581.08 969.61 2907 1.00
## y_rep[478] 688.86 5.34 284.45 378.60 637.74 1052.96 2833 1.00
## y_rep[479] 203.90 1.86 87.05 112.28 185.81 321.60 2202 1.00
## y_rep[480] 334.34 2.59 141.21 183.33 307.69 513.60 2967 1.00
## y_rep[481] 228.40 1.72 94.13 126.06 210.28 350.02 2981 1.00
## y_rep[482] 203.59 1.57 85.83 111.65 187.35 320.31 2970 1.00
## y_rep[483] 199.19 1.52 84.18 110.08 182.02 310.11 3076 1.00
## y_rep[484] 311.01 2.32 128.74 170.34 285.82 487.50 3091 1.00
## y_rep[485] 501.61 4.01 210.26 277.43 461.14 774.08 2746 1.00
## y_rep[486] 302.60 2.27 123.58 167.07 281.73 464.62 2970 1.00
## y_rep[487] 619.94 5.15 258.38 343.75 573.47 950.87 2514 1.00
## y_rep[488] 224.84 1.72 91.90 123.05 209.65 346.15 2859 1.00
## y_rep[489] 448.85 3.23 184.17 250.61 411.84 685.70 3245 1.00
## y_rep[490] 334.38 2.63 137.08 185.30 308.82 515.88 2716 1.00
## y_rep[491] 446.39 3.21 177.91 248.19 416.50 679.05 3078 1.00
## y_rep[492] 504.80 4.22 217.17 278.13 463.58 783.23 2649 1.00
## y_rep[493] 203.65 1.54 84.46 114.18 187.83 313.05 3001 1.00
## y_rep[494] 196.50 1.58 81.95 109.88 179.50 301.88 2700 1.00
## y_rep[495] 294.22 2.18 121.04 162.81 271.04 453.52 3072 1.00
## y_rep[496] 690.99 5.15 279.51 383.08 639.40 1068.46 2949 1.00
## y_rep[497] 204.41 1.58 85.42 112.78 188.94 316.81 2929 1.00
## y_rep[498] 633.51 4.95 265.22 352.55 584.37 969.73 2872 1.00
## y_rep[499] 306.09 2.35 124.90 172.13 287.53 468.55 2819 1.00
## y_rep[500] 298.60 2.23 123.55 163.62 274.46 460.70 3070 1.00
## y_rep[501] 205.57 1.44 84.18 112.21 191.01 316.77 3409 1.00
## y_rep[502] 687.51 5.28 288.32 380.31 625.73 1046.35 2984 1.00
## y_rep[503] 491.29 3.69 204.97 273.10 455.58 764.07 3089 1.00
## y_rep[504] 451.22 3.39 183.16 247.63 422.74 684.06 2916 1.00
## y_rep[505] 635.62 4.77 264.81 350.75 584.98 979.50 3084 1.00
## y_rep[506] 199.41 1.48 81.96 112.06 184.10 306.55 3081 1.00
## y_rep[507] 334.10 2.54 136.88 183.24 310.75 506.66 2906 1.00
## y_rep[508] 226.62 1.74 93.74 124.99 210.20 347.49 2916 1.00
## y_rep[509] 308.55 2.28 124.88 171.88 285.23 476.92 3001 1.00
## y_rep[510] 302.24 2.23 122.74 169.74 280.40 459.38 3034 1.00
## y_rep[511] 197.69 1.52 82.61 108.83 182.67 304.30 2941 1.00
## y_rep[512] 630.68 4.83 262.14 352.92 583.01 969.91 2944 1.00
## y_rep[513] 206.10 1.64 84.46 116.06 189.39 314.63 2654 1.00
## y_rep[514] 314.57 2.58 134.36 170.69 287.65 491.76 2711 1.00
## y_rep[515] 282.43 2.15 117.62 155.86 261.52 439.49 2993 1.00
## y_rep[516] 591.66 4.53 244.44 324.26 547.01 915.33 2915 1.00
## y_rep[517] 181.78 1.37 76.38 99.10 167.90 280.72 3099 1.00
## y_rep[518] 428.48 3.30 175.77 237.96 396.99 657.87 2843 1.00
## y_rep[519] 186.74 1.37 76.41 103.07 173.38 286.77 3132 1.00
## y_rep[520] 420.72 3.33 176.84 227.87 389.51 653.24 2813 1.00
## y_rep[521] 187.32 1.39 76.71 102.79 172.32 287.84 3035 1.00
## y_rep[522] 608.01 4.57 250.73 333.00 568.01 934.85 3011 1.00
## y_rep[523] 280.53 2.24 120.14 154.12 254.77 442.28 2880 1.00
## y_rep[524] 664.97 5.25 288.77 358.13 609.78 1042.69 3020 1.00
## y_rep[525] 470.13 3.64 197.83 263.97 431.54 726.27 2956 1.00
## y_rep[526] 589.84 4.67 250.07 329.15 547.08 908.38 2862 1.00
## y_rep[527] 462.33 3.47 192.74 254.10 427.44 709.39 3085 1.00
## y_rep[528] 574.83 4.25 235.78 314.55 534.52 875.07 3072 1.00
## y_rep[529] 273.35 2.08 113.62 150.16 251.96 421.61 2992 1.00
## y_rep[530] 279.48 2.07 114.03 157.32 257.83 427.74 3037 1.00
## y_rep[531] 186.08 1.50 79.78 102.97 171.10 287.39 2815 1.00
## y_rep[532] 410.51 3.14 171.12 226.37 377.66 634.24 2962 1.00
## y_rep[533] 416.12 3.14 170.01 228.40 386.16 646.84 2939 1.00
## y_rep[534] 206.80 1.57 86.30 115.47 190.17 317.99 3021 1.00
## y_rep[535] 283.17 2.31 117.45 151.76 264.39 435.74 2587 1.00
## y_rep[536] 661.92 5.21 279.79 368.25 612.87 1025.43 2880 1.00
## y_rep[537] 178.62 1.34 74.32 97.70 164.78 275.26 3088 1.00
## y_rep[538] 309.57 2.31 128.15 174.25 283.72 478.58 3081 1.00
## y_rep[539] 187.63 1.47 78.01 104.81 172.49 286.64 2819 1.00
## y_rep[540] 462.78 3.60 191.62 258.32 431.78 703.26 2840 1.00
## y_rep[541] 415.03 3.04 170.01 231.35 382.26 631.48 3137 1.00
## y_rep[542] 584.61 4.44 241.62 321.62 542.06 908.78 2955 1.00
## y_rep[543] 426.57 3.16 175.31 237.46 395.63 658.85 3075 1.00
## y_rep[544] 283.27 2.13 118.17 157.08 262.00 434.60 3084 1.00
## y_rep[545] 191.36 1.54 80.60 105.59 176.65 299.92 2733 1.00
## y_rep[546] 181.96 1.46 78.07 100.01 166.90 286.96 2865 1.00
## y_rep[547] 287.36 2.21 122.09 157.79 262.94 451.42 3047 1.00
## y_rep[548] 419.85 3.26 179.81 227.46 387.02 653.29 3047 1.00
## y_rep[549] 275.68 2.10 114.70 153.24 255.01 423.63 2992 1.00
## y_rep[550] 602.44 4.62 253.25 331.88 553.66 948.43 3010 1.00
## y_rep[551] 590.26 4.40 245.51 322.18 550.62 904.40 3106 1.00
## y_rep[552] 187.46 1.47 78.62 103.66 172.46 290.22 2844 1.00
## y_rep[553] 658.18 5.11 273.78 362.25 609.44 1014.13 2868 1.00
## y_rep[554] 313.14 2.32 131.03 173.05 287.88 489.54 3199 1.00
## y_rep[555] 202.11 1.69 83.89 111.33 186.29 311.67 2471 1.00
## y_rep[556] 281.75 2.10 113.92 156.70 260.87 431.84 2942 1.00
## y_rep[557] 189.93 1.47 78.77 105.38 174.36 292.53 2883 1.00
## y_rep[558] 408.99 3.20 171.50 221.85 379.97 636.04 2881 1.00
## y_rep[559] 205.88 1.54 85.75 114.02 190.53 319.38 3093 1.00
## y_rep[560] 182.44 1.47 78.56 98.45 167.97 284.03 2852 1.00
## y_rep[561] 584.37 4.66 241.15 324.49 542.59 889.91 2680 1.00
## y_rep[562] 596.73 4.77 253.47 327.37 549.45 915.88 2829 1.00
## y_rep[563] 313.66 2.38 130.50 172.35 289.78 488.28 3002 1.00
## y_rep[564] 425.09 3.22 173.36 241.29 390.94 652.16 2890 1.00
## y_rep[565] 274.35 2.15 117.82 148.03 252.95 422.60 2995 1.00
## y_rep[566] 184.79 1.56 79.17 100.30 170.56 283.74 2582 1.00
## y_rep[567] 281.18 2.24 118.73 155.16 259.44 438.17 2811 1.00
## y_rep[568] 185.18 1.46 78.46 102.06 169.15 285.93 2883 1.00
## y_rep[569] 574.82 4.64 241.89 319.25 523.14 893.26 2713 1.00
## y_rep[570] 284.82 2.10 115.99 160.74 260.74 435.50 3046 1.00
## y_rep[571] 422.67 3.13 175.02 235.50 389.27 649.58 3130 1.00
## y_rep[572] 591.82 4.49 242.87 329.07 550.89 898.95 2931 1.00
## y_rep[573] 284.66 2.25 119.22 158.21 263.68 432.30 2803 1.00
## y_rep[574] 426.17 3.42 183.61 230.45 390.02 657.33 2875 1.00
## y_rep[575] 464.74 3.56 196.53 255.72 426.89 718.33 3044 1.00
## y_rep[576] 204.70 1.54 85.25 111.07 188.89 317.40 3058 1.00
## y_rep[577] 273.44 2.10 113.03 151.64 255.31 421.65 2888 1.00
## y_rep[578] 311.24 2.41 129.33 169.94 288.26 482.52 2884 1.00
## y_rep[579] 652.26 5.07 277.71 353.36 600.82 1005.26 3002 1.00
## y_rep[580] 182.02 1.44 76.70 101.19 167.50 276.27 2828 1.00
## y_rep[581] 598.00 4.45 246.56 335.71 550.96 914.60 3065 1.00
## y_rep[582] 409.04 3.12 171.04 228.65 376.67 635.02 3011 1.00
## y_rep[583] 187.63 1.44 76.46 103.28 175.48 288.23 2801 1.00
## y_rep[584] 187.96 1.54 79.22 104.27 172.71 291.29 2654 1.00
## y_rep[585] 275.80 2.08 114.50 154.05 254.62 423.00 3042 1.00
## y_rep[586] 205.74 1.53 84.95 114.42 190.41 318.41 3067 1.00
## y_rep[587] 463.01 3.77 188.36 255.73 431.79 715.23 2499 1.00
## y_rep[588] 309.99 2.41 132.73 173.07 281.30 482.24 3029 1.00
## y_rep[589] 419.44 3.38 176.45 232.17 382.35 660.51 2731 1.00
## y_rep[590] 272.69 2.20 118.40 147.71 249.18 426.90 2907 1.00
## y_rep[591] 602.65 4.79 251.61 330.85 554.72 935.79 2761 1.00
## y_rep[592] 659.81 5.11 276.28 359.43 607.64 1017.83 2918 1.00
## y_rep[593] 286.08 2.21 120.43 160.80 261.05 441.61 2966 1.00
## y_rep[594] 423.38 3.09 175.46 234.39 390.17 645.67 3232 1.00
## y_rep[595] 180.76 1.37 77.26 100.40 164.71 282.67 3177 1.00
## y_rep[596] 186.47 1.60 76.96 102.27 172.18 287.45 2323 1.00
## y_rep[597] 592.59 4.51 245.86 330.29 545.42 908.52 2976 1.00
## y_rep[598] 587.00 4.32 243.04 322.63 542.23 903.20 3163 1.00
## y_rep[599] 462.32 3.65 192.58 258.64 427.43 724.49 2785 1.00
## y_rep[600] 205.89 1.56 86.66 113.72 189.74 319.20 3071 1.00
## y_rep[601] 602.48 4.61 249.58 330.73 556.64 919.13 2926 1.00
## y_rep[602] 287.96 2.27 120.83 157.31 265.81 442.23 2825 1.00
## y_rep[603] 186.88 1.46 78.14 102.33 171.81 286.71 2864 1.00
## y_rep[604] 275.78 2.22 118.01 150.90 254.26 424.07 2819 1.00
## y_rep[605] 425.50 3.18 173.48 235.65 395.36 651.05 2976 1.00
## y_rep[606] 653.10 5.12 276.51 353.33 605.98 1010.86 2921 1.00
## y_rep[607] 182.28 1.36 75.16 100.04 169.62 277.28 3070 1.00
## y_rep[608] 406.94 2.98 166.92 225.28 377.02 619.64 3133 1.00
## y_rep[609] 185.17 1.37 74.88 101.82 171.63 287.43 2970 1.00
## y_rep[610] 308.58 2.51 126.09 172.47 286.70 476.20 2533 1.00
## y_rep[611] 660.83 5.04 275.68 364.12 606.45 1020.82 2986 1.00
## y_rep[612] 280.68 2.33 120.45 156.69 255.42 430.66 2672 1.00
## y_rep[613] 205.88 1.52 83.22 115.31 190.53 314.79 2999 1.00
## y_rep[614] 593.18 4.52 242.42 330.19 547.98 903.41 2873 1.00
## y_rep[615] 424.70 3.29 179.40 234.01 391.88 654.06 2968 1.00
## y_rep[616] 418.35 3.22 172.03 230.80 387.33 648.48 2847 1.00
## y_rep[617] 286.19 2.29 120.60 158.23 261.18 442.63 2785 1.00
## y_rep[618] 310.73 2.37 126.05 175.56 287.58 477.72 2824 1.00
## y_rep[619] 605.13 4.88 252.40 337.31 555.84 939.98 2673 1.00
## y_rep[620] 183.19 1.38 76.41 101.34 169.15 279.76 3076 1.00
## y_rep[621] 183.78 1.36 74.62 102.84 170.81 280.20 3023 1.00
## y_rep[622] 463.15 3.52 189.55 258.27 433.37 708.64 2901 1.00
## y_rep[623] 187.57 1.48 78.66 103.90 172.27 289.37 2820 1.00
## y_rep[624] 308.87 2.28 123.82 171.83 285.52 475.63 2957 1.00
## y_rep[625] 662.24 5.12 280.61 375.62 605.89 1016.12 3008 1.00
## y_rep[626] 190.52 1.54 81.05 105.46 174.34 293.75 2766 1.00
## y_rep[627] 280.92 2.10 115.57 157.34 260.26 431.72 3015 1.00
## y_rep[628] 606.31 4.63 249.68 334.39 560.85 926.92 2911 1.00
## y_rep[629] 271.40 1.97 108.74 151.40 250.74 419.03 3061 1.00
## y_rep[630] 179.45 1.33 72.36 100.63 167.15 274.74 2947 1.00
## y_rep[631] 423.14 3.46 176.89 235.41 391.52 652.57 2610 1.00
## y_rep[632] 464.71 3.52 193.23 257.32 428.51 715.70 3015 1.00
## y_rep[633] 283.82 2.11 115.44 154.35 263.34 437.97 2989 1.00
## y_rep[634] 202.85 1.55 85.08 112.97 189.56 309.29 3009 1.00
## y_rep[635] 186.12 1.53 76.83 101.57 171.79 290.86 2520 1.00
## y_rep[636] 196.60 1.48 79.91 108.67 183.86 297.91 2917 1.00
## y_rep[637] 198.99 1.50 81.82 110.70 183.36 306.16 2987 1.00
## y_rep[638] 632.38 5.03 268.48 348.27 573.53 987.63 2847 1.00
## y_rep[639] 449.56 3.40 189.82 248.17 413.15 699.41 3110 1.00
## y_rep[640] 301.22 2.38 124.33 169.56 277.37 461.56 2740 1.00
## y_rep[641] 296.50 2.30 123.55 167.67 273.94 448.70 2879 1.00
## y_rep[642] 194.97 1.45 80.71 107.85 178.71 303.95 3112 1.00
## y_rep[643] 302.63 2.31 125.38 167.54 278.70 466.16 2937 1.00
## y_rep[644] 293.77 2.21 121.47 163.85 271.32 456.76 3023 1.00
## y_rep[645] 443.11 3.52 189.40 244.95 408.80 675.99 2901 1.00
## y_rep[646] 626.64 4.88 262.51 350.89 573.63 964.73 2895 1.00
## y_rep[647] 445.99 3.46 188.57 246.36 410.29 686.83 2970 1.00
## y_rep[648] 644.72 5.29 264.71 353.53 598.23 984.50 2499 1.00
## y_rep[649] 303.57 2.35 128.79 162.74 280.66 469.25 2994 1.00
## y_rep[650] 640.53 4.73 267.32 356.79 593.21 986.06 3191 1.00
## y_rep[651] 716.46 5.73 309.34 385.82 656.63 1115.30 2915 1.00
## y_rep[652] 655.94 5.09 275.31 360.48 605.80 1010.26 2930 1.00
## y_rep[653] 441.89 3.53 188.35 244.53 404.70 687.99 2840 1.00
## y_rep[654] 459.71 3.60 189.43 256.87 427.13 700.53 2777 1.00
## y_rep[655] 295.03 2.23 121.16 164.46 270.95 458.25 2951 1.00
## y_rep[656] 636.87 4.84 264.16 349.95 586.45 977.34 2974 1.00
## y_rep[657] 338.91 2.76 145.78 188.31 309.58 524.53 2796 1.00
## y_rep[658] 306.29 2.49 126.95 171.62 283.64 472.79 2598 1.00
## y_rep[659] 493.96 3.85 205.57 270.58 455.96 763.04 2849 1.00
## y_rep[660] 451.32 3.47 190.09 249.05 413.54 704.88 2996 1.00
## y_rep[661] 440.50 3.23 180.97 241.81 407.67 682.29 3134 1.00
## y_rep[662] 504.21 3.78 203.20 282.29 470.00 770.75 2886 1.00
## y_rep[663] 332.56 2.55 137.86 181.66 310.16 512.20 2933 1.00
## y_rep[664] 295.25 2.31 126.02 159.27 274.84 457.22 2965 1.00
## y_rep[665] 711.95 5.12 289.08 400.71 660.90 1082.23 3193 1.00
## y_rep[666] 643.24 5.30 271.85 352.37 595.66 994.72 2632 1.00
## y_rep[667] 193.18 1.51 80.42 108.33 178.70 296.51 2821 1.00
## y_rep[668] 306.65 2.36 130.41 166.84 281.21 477.30 3046 1.00
## y_rep[669] 655.22 5.18 279.83 349.69 605.79 1011.96 2921 1.00
## y_rep[670] 332.43 2.50 137.45 185.39 305.60 514.45 3011 1.00
## y_rep[671] 197.93 1.58 79.80 108.80 185.40 301.85 2560 1.00
## y_rep[672] 504.89 3.86 208.73 280.32 462.63 782.06 2917 1.00
## y_rep[673] 220.12 1.83 90.16 123.48 205.40 334.06 2438 1.00
## y_rep[674] 716.23 5.72 298.72 400.84 653.56 1096.56 2723 1.00
## y_rep[675] 470.12 3.72 199.99 255.39 432.94 733.03 2894 1.00
## y_rep[676] 200.03 1.57 83.64 108.02 183.45 312.28 2843 1.00
## y_rep[677] 200.50 1.51 84.63 109.87 185.25 307.90 3140 1.00
## y_rep[678] 501.76 3.77 208.18 280.41 464.49 779.44 3057 1.00
## y_rep[679] 306.07 2.49 127.60 169.74 280.37 472.40 2628 1.00
## y_rep[680] 198.24 1.48 82.09 109.82 183.21 302.32 3093 1.00
## y_rep[681] 651.71 4.91 272.21 357.90 598.85 1017.50 3068 1.00
## y_rep[682] 453.95 3.52 192.42 250.35 418.31 700.92 2983 1.00
## y_rep[683] 217.80 1.80 91.99 118.88 201.84 332.87 2609 1.00
## y_rep[684] 192.93 1.42 78.66 107.99 177.78 298.09 3070 1.00
## y_rep[685] 331.72 2.52 135.67 180.23 311.64 508.84 2905 1.00
## y_rep[686] 706.13 5.46 297.67 384.40 649.21 1098.70 2972 1.00
## y_rep[687] 194.40 1.50 82.48 106.50 178.51 304.18 3015 1.00
## y_rep[688] 654.62 5.01 276.26 359.91 599.77 1030.33 3046 1.00
## y_rep[689] 311.74 2.47 132.54 171.59 286.34 482.09 2884 1.00
## y_rep[690] 219.63 1.64 88.91 122.02 203.43 335.99 2932 1.00
## y_rep[691] 202.06 1.57 85.61 112.21 185.92 308.08 2984 1.00
## y_rep[692] 295.37 2.24 122.83 161.75 273.24 455.02 3014 1.00
## y_rep[693] 197.68 1.50 84.78 109.11 180.75 305.60 3182 1.00
## y_rep[694] 455.89 3.57 188.60 259.31 418.24 698.04 2790 1.00
## y_rep[695] 308.82 2.43 130.31 164.73 284.30 475.83 2881 1.00
## y_rep[696] 655.85 4.88 277.67 362.42 607.13 1014.91 3235 1.00
## y_rep[697] 194.80 1.46 80.10 108.51 181.95 297.38 3018 1.00
## y_rep[698] 218.16 1.60 89.41 122.62 201.54 335.00 3118 1.00
## y_rep[699] 294.40 2.23 122.12 163.14 270.69 458.31 2999 1.00
## y_rep[700] 462.35 3.70 189.86 253.31 426.91 708.01 2638 1.00
## y_rep[701] 309.38 2.32 128.40 170.18 284.68 483.85 3060 1.00
## y_rep[702] 205.65 1.69 86.59 114.14 190.48 316.96 2628 1.00
## y_rep[703] 200.03 1.54 82.50 111.65 185.05 310.79 2871 1.00
## y_rep[704] 331.27 2.44 136.05 182.05 308.51 508.23 3101 1.00
## y_rep[705] 334.26 2.60 144.02 185.46 302.78 527.26 3061 1.00
## y_rep[706] 301.09 2.29 126.62 165.33 278.50 473.39 3064 1.00
## y_rep[707] 196.65 1.49 81.17 108.43 183.25 302.45 2986 1.00
## y_rep[708] 193.80 1.49 78.77 108.00 181.43 291.04 2813 1.00
## y_rep[709] 201.21 1.57 83.53 109.86 187.84 309.11 2847 1.00
## y_rep[710] 650.37 5.11 270.03 351.69 599.78 1001.58 2792 1.00
## y_rep[711] 452.99 3.74 187.61 247.30 420.10 689.53 2517 1.00
## y_rep[712] 714.84 5.61 301.17 395.36 662.52 1075.16 2882 1.00
## y_rep[713] 218.25 1.72 90.25 123.78 200.69 334.41 2739 1.00
## y_rep[714] 295.48 2.34 124.86 161.64 269.99 455.72 2844 1.00
## y_rep[715] 497.88 3.71 202.64 282.63 461.59 758.84 2979 1.00
## y_rep[716] 307.94 2.37 128.27 173.40 281.69 477.38 2919 1.00
## y_rep[717] 297.55 2.34 120.37 166.88 275.45 451.92 2641 1.00
## y_rep[718] 455.79 3.49 191.20 247.47 422.37 696.91 3003 1.00
## y_rep[719] 310.54 2.29 127.47 173.17 287.62 483.27 3100 1.00
## y_rep[720] 206.93 1.56 86.53 113.83 190.96 314.80 3062 1.00
## y_rep[721] 500.94 3.69 206.43 278.32 461.00 779.95 3131 1.00
## y_rep[722] 341.54 2.68 142.91 191.10 315.32 521.53 2841 1.00
## y_rep[723] 227.43 1.71 90.75 126.52 212.84 345.25 2829 1.00
## y_rep[724] 461.75 3.45 192.31 255.13 425.28 718.65 3100 1.00
## y_rep[725] 620.57 4.66 255.36 351.91 571.14 961.42 3007 1.00
## y_rep[726] 201.49 1.50 83.49 109.30 186.61 307.03 3098 1.00
## y_rep[727] 209.39 1.60 85.56 115.88 194.59 323.41 2850 1.00
## y_rep[728] 684.60 5.34 285.58 374.37 631.02 1064.35 2861 1.00
## y_rep[729] 624.73 4.91 258.72 347.21 581.56 964.51 2779 1.00
## y_rep[730] 313.73 2.86 135.00 173.45 284.58 493.31 2228 1.00
## y_rep[731] 228.73 1.79 94.97 127.10 211.27 355.58 2804 1.00
## y_rep[732] 311.03 2.48 130.91 175.92 285.82 481.99 2777 1.00
## y_rep[733] 450.34 3.27 184.89 247.95 418.24 699.85 3190 1.00
## y_rep[734] 449.77 3.53 188.94 245.79 414.25 696.00 2857 1.00
## y_rep[735] 301.83 2.29 123.98 165.96 279.71 463.25 2918 1.00
## y_rep[736] 200.55 1.58 81.53 110.44 185.87 309.03 2660 1.00
## y_rep[737] 208.35 1.55 86.08 116.47 192.02 320.57 3082 1.00
## y_rep[738] 624.46 4.94 258.90 348.48 574.50 972.59 2744 1.00
## y_rep[739] 614.79 4.69 247.95 345.78 569.76 934.83 2792 1.00
## y_rep[740] 208.59 1.68 87.49 114.38 193.00 318.69 2713 1.00
## y_rep[741] 201.38 1.44 81.60 112.16 188.75 311.96 3217 1.00
## y_rep[742] 204.70 1.51 83.28 115.29 190.99 315.82 3034 1.00
## y_rep[743] 448.77 3.43 186.23 243.94 415.20 690.86 2944 1.00
## y_rep[744] 501.75 3.79 208.88 276.66 465.68 766.01 3032 1.00
## y_rep[745] 341.06 2.72 143.64 188.87 316.73 523.41 2796 1.00
## y_rep[746] 302.30 2.31 126.01 165.61 280.52 459.55 2976 1.00
## y_rep[747] 632.45 4.82 259.59 354.18 578.50 976.77 2899 1.00
## y_rep[748] 229.60 1.73 96.92 125.24 210.55 356.96 3150 1.00
## y_rep[749] 309.48 2.32 127.91 172.22 286.74 479.42 3035 1.00
## y_rep[750] 624.99 4.87 263.84 339.71 575.94 967.24 2936 1.00
## y_rep[751] 313.82 2.56 134.24 171.93 288.34 486.73 2752 1.00
## y_rep[752] 456.98 3.55 186.06 255.69 421.43 695.16 2745 1.00
## y_rep[753] 205.65 1.52 84.99 114.25 189.00 317.26 3128 1.00
## y_rep[754] 689.85 5.22 280.93 383.73 642.98 1052.93 2892 1.00
## y_rep[755] 230.53 1.93 97.24 125.41 211.91 359.11 2539 1.00
## y_rep[756] 632.82 5.07 264.73 346.19 584.63 969.74 2724 1.00
## y_rep[757] 337.52 2.57 136.32 193.86 311.00 511.44 2809 1.00
## y_rep[758] 300.02 2.43 125.47 168.18 278.54 455.23 2676 1.00
## y_rep[759] 309.14 2.33 127.58 171.10 286.71 471.73 3007 1.00
## y_rep[760] 446.31 3.41 187.87 245.96 412.33 691.03 3032 1.00
## y_rep[761] 201.92 1.55 84.86 110.37 185.18 311.52 3002 1.00
## y_rep[762] 607.84 4.52 247.67 337.07 563.53 935.17 3008 1.00
## y_rep[763] 451.69 3.31 183.19 253.44 417.46 687.28 3067 1.00
## y_rep[764] 686.99 5.06 283.27 384.73 632.53 1057.27 3129 1.00
## y_rep[765] 625.38 4.91 265.17 337.90 575.23 970.45 2912 1.00
## y_rep[766] 315.35 2.43 131.81 172.67 292.03 484.76 2949 1.00
## y_rep[767] 501.11 3.72 201.92 279.06 466.07 766.15 2952 1.00
## y_rep[768] 209.04 1.59 85.47 116.03 194.67 321.90 2891 1.00
## y_rep[769] 208.87 1.56 85.25 117.31 193.11 323.64 3002 1.00
## y_rep[770] 457.18 3.50 191.22 253.14 421.29 706.32 2980 1.00
## y_rep[771] 225.52 1.61 91.62 125.80 209.12 344.22 3249 1.00
## y_rep[772] 460.36 3.66 194.33 256.20 423.13 710.88 2827 1.00
## y_rep[773] 206.70 1.58 86.29 113.91 192.19 313.99 2996 1.00
## y_rep[774] 302.89 2.27 124.92 171.05 279.62 460.83 3034 1.00
## y_rep[775] 633.18 4.85 264.36 354.49 579.87 965.71 2975 1.00
## y_rep[776] 445.08 3.32 182.97 250.99 411.26 691.64 3043 1.00
## y_rep[777] 335.81 2.57 135.98 191.41 310.50 508.74 2794 1.00
## y_rep[778] 201.77 1.51 82.51 113.74 188.92 304.83 3000 1.00
## y_rep[779] 209.14 1.54 85.68 116.56 195.34 323.70 3093 1.00
## y_rep[780] 299.35 2.31 125.99 166.64 276.91 454.43 2970 1.00
## y_rep[781] 628.60 5.04 260.61 351.38 577.16 967.27 2672 1.00
## y_rep[782] 307.54 2.32 125.15 170.63 286.43 471.72 2922 1.00
## y_rep[783] 202.59 1.70 86.43 111.79 186.64 310.18 2597 1.00
## y_rep[784] 228.88 1.85 98.45 125.94 209.70 358.04 2847 1.00
## y_rep[785] 308.64 2.36 128.53 173.12 282.94 476.46 2967 1.00
## y_rep[786] 208.50 1.56 85.77 117.08 192.03 320.06 3011 1.00
## y_rep[787] 628.86 5.03 267.89 345.70 576.93 978.92 2835 1.00
## y_rep[788] 687.28 5.82 293.42 381.70 631.70 1059.29 2540 1.00
## y_rep[789] 297.60 2.29 125.54 164.04 272.34 457.11 3015 1.00
## y_rep[790] 206.39 1.59 88.15 112.09 189.49 322.34 3082 1.00
## y_rep[791] 443.69 3.42 183.51 249.43 410.19 685.01 2885 1.00
## y_rep[792] 441.57 3.55 184.24 243.33 408.47 688.32 2690 1.00
## y_rep[793] 500.44 3.75 207.13 273.70 467.89 771.23 3056 1.00
## y_rep[794] 339.93 2.62 140.15 189.84 314.46 510.17 2863 1.00
## y_rep[795] 230.28 1.75 95.10 130.52 211.33 354.78 2951 1.00
## y_rep[796] 502.86 4.04 205.96 277.97 465.00 779.35 2602 1.00
## y_rep[797] 311.70 2.20 123.05 174.17 289.96 477.99 3117 1.00
## y_rep[798] 633.12 4.52 260.70 343.10 587.91 969.77 3327 1.00
## y_rep[799] 210.88 1.62 88.61 116.72 194.31 327.69 3009 1.00
## y_rep[800] 460.48 3.46 190.81 253.62 428.06 716.55 3043 1.00
## y_rep[801] 207.89 1.56 86.31 114.65 192.17 317.28 3054 1.00
## y_rep[802] 455.07 3.65 192.46 252.43 420.02 689.88 2786 1.00
## y_rep[803] 610.34 4.60 252.24 340.47 567.95 941.66 3001 1.00
## y_rep[804] 624.88 4.59 255.65 347.77 575.72 967.24 3100 1.00
## y_rep[805] 338.21 2.61 142.46 186.77 309.43 516.85 2989 1.00
## y_rep[806] 447.27 3.48 188.37 245.88 410.81 684.11 2929 1.00
## y_rep[807] 200.20 1.50 81.19 110.73 187.63 308.49 2934 1.00
## y_rep[808] 308.32 2.27 125.05 172.20 286.20 475.28 3037 1.00
## y_rep[809] 301.69 2.25 122.05 170.70 280.66 455.93 2934 1.00
## y_rep[810] 300.11 2.28 127.44 163.60 275.53 467.17 3119 1.00
## y_rep[811] 436.40 3.39 183.65 240.59 396.00 673.03 2941 1.00
## y_rep[812] 309.41 2.42 127.16 169.58 285.12 479.58 2760 1.00
## y_rep[813] 628.20 4.71 256.93 349.35 583.20 960.69 2980 1.00
## y_rep[814] 636.57 4.74 261.27 349.30 588.22 980.55 3038 1.00
## y_rep[815] 198.55 1.51 81.39 109.98 183.93 302.28 2901 1.00
## y_rep[816] 688.17 5.21 288.72 381.52 631.54 1065.14 3073 1.00
## y_rep[817] 228.38 1.78 92.85 129.02 210.94 353.01 2715 1.00
## y_rep[818] 447.39 3.38 182.06 249.77 416.09 687.02 2909 1.00
## y_rep[819] 491.03 3.96 205.61 273.27 453.19 752.77 2692 1.00
## y_rep[820] 605.80 4.54 251.30 334.43 557.78 941.53 3065 1.00
## y_rep[821] 210.07 1.53 86.08 116.21 194.90 321.56 3172 1.00
## y_rep[822] 458.04 3.26 183.46 254.14 425.35 701.85 3167 1.00
## y_rep[823] 203.72 1.56 84.27 111.88 188.48 309.40 2934 1.00
## y_rep[824] 309.39 2.28 125.99 172.37 289.35 473.58 3053 1.00
## y_rep[825] 340.17 2.72 145.91 184.30 314.02 528.62 2874 1.00
## y_rep[826] 694.06 5.31 289.51 388.89 639.81 1078.41 2975 1.00
## y_rep[827] 227.91 1.87 96.82 127.37 208.08 346.90 2668 1.00
## y_rep[828] 304.11 2.25 125.21 169.22 281.95 466.91 3089 1.00
## y_rep[829] 455.14 3.36 187.07 254.15 418.77 706.72 3090 1.00
## y_rep[830] 300.28 2.38 124.73 166.43 277.48 463.04 2758 1.00
## y_rep[831] 462.80 3.50 195.08 256.31 424.98 717.19 3102 1.00
## y_rep[832] 501.24 3.81 210.67 280.16 463.02 770.70 3065 1.00
## y_rep[833] 210.18 1.61 88.25 115.42 193.05 324.50 3006 1.00
## y_rep[834] 313.24 2.32 130.58 171.97 291.22 476.48 3161 1.00
## y_rep[835] 336.54 2.45 135.69 192.22 311.79 515.53 3063 1.00
## y_rep[836] 200.95 1.57 84.20 110.24 185.25 309.07 2876 1.00
## y_rep[837] 633.40 5.00 264.15 348.97 588.08 980.32 2794 1.00
## y_rep[838] 623.98 4.80 260.85 346.38 571.89 950.14 2948 1.00
## y_rep[839] 204.40 1.64 86.04 114.33 187.35 314.58 2765 1.00
## y_rep[840] 311.27 2.31 128.13 173.26 287.26 474.75 3082 1.00
## y_rep[841] 230.86 1.76 95.92 126.67 213.57 356.48 2963 1.00
## y_rep[842] 631.38 4.71 259.25 348.05 587.49 983.93 3024 1.00
## y_rep[843] 635.95 4.91 270.49 350.21 584.99 993.91 3036 1.00
## y_rep[844] 335.02 2.52 138.19 185.87 309.52 507.66 3012 1.00
## y_rep[845] 454.46 3.56 185.43 254.85 418.41 695.44 2707 1.00
## y_rep[846] 604.31 4.42 246.85 340.33 557.92 927.28 3115 1.00
## y_rep[847] 200.71 1.50 82.33 112.39 184.61 309.38 3027 1.00
## y_rep[848] 436.66 3.34 181.31 240.10 399.89 674.60 2950 1.00
## y_rep[849] 697.12 5.11 288.43 390.08 643.53 1073.35 3189 1.00
## y_rep[850] 306.18 2.56 128.18 168.45 281.24 476.09 2511 1.00
## y_rep[851] 445.27 3.29 177.28 249.03 416.14 678.39 2902 1.00
## y_rep[852] 208.21 1.60 86.00 115.02 193.36 324.40 2875 1.00
## y_rep[853] 496.10 4.01 208.29 278.37 458.42 764.58 2699 1.00
## y_rep[854] 301.53 2.40 124.25 168.36 276.79 462.29 2678 1.00
## y_rep[855] 203.65 1.50 83.60 113.98 186.96 313.64 3094 1.00
## y_rep[856] 301.63 2.29 126.88 166.22 279.09 467.25 3061 1.00
## y_rep[857] 211.81 1.66 90.38 114.57 197.92 324.96 2966 1.00
## y_rep[858] 500.68 3.75 205.08 277.20 461.57 770.14 2994 1.00
## y_rep[859] 441.53 3.43 184.24 242.75 409.63 682.64 2886 1.00
## y_rep[860] 208.13 1.59 87.97 113.89 190.23 320.96 3080 1.00
## y_rep[861] 203.16 1.67 84.17 111.94 187.85 309.97 2554 1.00
## y_rep[862] 345.02 2.59 141.86 193.35 320.94 532.60 3011 1.00
## y_rep[863] 599.37 4.62 253.16 333.08 549.86 936.79 3003 1.00
## y_rep[864] 689.58 5.33 290.99 379.92 631.95 1086.18 2983 1.00
## y_rep[865] 224.98 1.77 93.19 123.69 206.49 347.70 2771 1.00
## y_rep[866] 498.63 3.73 208.61 275.85 460.96 759.24 3125 1.00
## y_rep[867] 618.60 4.43 251.74 340.75 571.10 961.98 3230 1.00
## y_rep[868] 211.12 1.58 87.09 116.12 194.94 327.38 3029 1.00
## y_rep[869] 316.53 2.42 134.19 176.55 290.64 482.12 3080 1.00
## y_rep[870] 636.57 4.95 268.44 350.93 582.95 984.27 2942 1.00
## y_rep[871] 687.43 5.45 290.80 379.02 633.69 1067.19 2850 1.00
## y_rep[872] 466.78 3.81 196.38 259.00 428.79 718.43 2654 1.00
## y_rep[873] 297.74 2.27 125.09 164.98 272.64 462.63 3034 1.00
## y_rep[874] 232.28 1.72 96.27 127.15 212.92 357.30 3125 1.00
## y_rep[875] 448.21 3.35 185.52 246.73 414.95 687.56 3058 1.00
## y_rep[876] 338.07 2.52 135.10 190.74 315.55 514.51 2881 1.00
## y_rep[877] 209.70 1.58 88.09 116.18 191.20 323.37 3097 1.00
## y_rep[878] 306.77 2.31 125.46 168.83 285.93 466.33 2946 1.00
## y_rep[879] 202.42 1.56 84.00 111.26 187.41 308.67 2917 1.00
## lp__ 389.00 0.27 5.09 382.48 388.92 395.47 365 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:18:33 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for mod3_1: overlay the densities of 50
# replicated datasets (y_rep) on the observed height distribution.
y_rep <- as.matrix(fit.mod3_1, pars = "y_rep")
ppc_theme <- theme(legend.text = element_text(size = 25),
                   legend.title = element_text(size = 18),
                   axis.text = element_text(size = 18),
                   legend.position = c(0.8, 0.6))
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  ppc_theme
Let’s call the correlation matrix \(\mathbf{R}\):
\[\mathbf{R} = \begin{pmatrix} 1 & \rho \\ \rho & 1 \end{pmatrix}\] If the prior was:
\[\mathbf{R} \sim \text{LKJcorr(2)}\]
There were more divergent transitions.
If the prior was:
\[\mathbf{R} \sim \text{LKJcorr(4)}\]
There were fewer divergent transitions; see the model below.
McElreath: “So whatever is the LKJcorr distribution? What LKJcorr(2) does is define a weakly informative prior on \(\rho\) that is skeptical of extreme correlations near −1 or 1. You can think of it as a regularizing prior for correlations. This distribution has a single parameter, \(\eta\), that controls how skeptical the prior is of large correlations in the matrix. When we use LKJcorr(1), the prior is flat over all valid correlation matrices. When the value is greater than 1, such as the 2 we used above, then extreme correlations are less likely. To visualize this family of priors, it will help to sample random matrices from it and plot the distribution of correlations”
# Sample 10,000 2x2 correlation matrices from LKJ priors with eta = 1, 2, 4
# and compare the implied marginal densities of the off-diagonal correlation.
Rho1 <- rlkjcorr(1e4, K = 2, eta = 1)
Rho2 <- rlkjcorr(1e4, K = 2, eta = 2)
Rho4 <- rlkjcorr(1e4, K = 2, eta = 4)
plot_grid(
  dens(Rho1[, 1, 2], xlim = c(-1, 1), ylim = c(0, 1.2), xlab = "correlation", main = "eta=1"),
  dens(Rho2[, 1, 2], xlim = c(-1, 1), ylim = c(0, 1.2), xlab = "correlation", main = "eta=2"),
  dens(Rho4[, 1, 2], xlim = c(-1, 1), ylim = c(0, 1.2), xlab = "correlation", main = "eta=4")
)
mod3_2: Model with \(\mathbf{R} \sim \text{LKJcorr(4)}\)
# Compile and fit mod3_2: same model as mod3_1 but with the LKJcorr(4) prior
# on the provenance correlation matrix (fewer divergent transitions).
# Fix: use `<-` (not `=`) for top-level assignment, per R convention.
mod3_2 <- stan_model("mod3_2.stan")
fit.mod3_2 <- sampling(mod3_2, data = data.list_mod3, iter = 3000, chains = 2,
                       cores = 2,
                       # high max_treedepth / adapt_delta to tame divergences
                       control = list(max_treedepth = 14, adapt_delta = 0.99))
# Posterior summary (10%, 50%, 90% quantiles) for the structural parameters.
print(fit.mod3_2, pars = c("beta_age", "beta_age2",
                           "alpha", "alpha_block", "sigma_block",
                           "Rho_prov", "sigma_prov", "alpha_prov", "beta_prov",
                           "v_prov", "SRS_prov",
                           "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod3_2.
## 2 chains, each with iter=3000; warmup=1500; thin=1;
## post-warmup draws per chain=1500, total post-warmup draws=3000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.03 0.43 0.46 0.49 535 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 1732 1.00
## alpha 5.85 0.01 0.10 5.76 5.85 5.93 197 1.00
## alpha_block[1] -0.05 0.00 0.08 -0.12 -0.05 0.02 314 1.00
## alpha_block[2] -0.03 0.00 0.07 -0.10 -0.02 0.04 320 1.00
## alpha_block[3] -0.01 0.00 0.07 -0.08 -0.01 0.06 322 1.00
## alpha_block[4] 0.07 0.00 0.07 0.01 0.07 0.15 344 1.00
## sigma_block 0.11 0.00 0.09 0.04 0.08 0.20 492 1.00
## Rho_prov[1,1] 1.00 NaN 0.00 1.00 1.00 1.00 NaN NaN
## Rho_prov[1,2] -0.08 0.01 0.35 -0.54 -0.08 0.39 1879 1.00
## Rho_prov[2,1] -0.08 0.01 0.35 -0.54 -0.08 0.39 1879 1.00
## Rho_prov[2,2] 1.00 0.00 0.00 1.00 1.00 1.00 1961 1.00
## sigma_prov[1] 0.08 0.01 0.09 0.03 0.06 0.13 153 1.02
## sigma_prov[2] 0.03 0.00 0.04 0.00 0.02 0.07 442 1.01
## alpha_prov[1] 0.06 0.01 0.08 0.00 0.05 0.11 140 1.02
## alpha_prov[2] 0.01 0.01 0.08 -0.04 0.00 0.06 135 1.02
## alpha_prov[3] -0.06 0.01 0.08 -0.13 -0.06 -0.01 164 1.01
## alpha_prov[4] 0.02 0.01 0.08 -0.04 0.01 0.07 147 1.02
## alpha_prov[5] 0.02 0.01 0.08 -0.04 0.01 0.07 144 1.01
## beta_prov[1] -0.01 0.00 0.03 -0.04 -0.01 0.01 555 1.00
## beta_prov[2] 0.00 0.00 0.03 -0.03 0.00 0.03 547 1.00
## beta_prov[3] 0.01 0.00 0.03 -0.02 0.01 0.05 580 1.00
## beta_prov[4] 0.02 0.00 0.04 -0.01 0.01 0.07 466 1.00
## beta_prov[5] -0.01 0.00 0.03 -0.04 0.00 0.02 687 1.00
## v_prov[1,1] 0.06 0.01 0.08 0.00 0.05 0.11 140 1.02
## v_prov[1,2] -0.01 0.00 0.03 -0.04 -0.01 0.01 555 1.00
## v_prov[2,1] 0.01 0.01 0.08 -0.04 0.00 0.06 135 1.02
## v_prov[2,2] 0.00 0.00 0.03 -0.03 0.00 0.03 547 1.00
## v_prov[3,1] -0.06 0.01 0.08 -0.13 -0.06 -0.01 164 1.01
## v_prov[3,2] 0.01 0.00 0.03 -0.02 0.01 0.05 580 1.00
## v_prov[4,1] 0.02 0.01 0.08 -0.04 0.01 0.07 147 1.02
## v_prov[4,2] 0.02 0.00 0.04 -0.01 0.01 0.07 466 1.00
## v_prov[5,1] 0.02 0.01 0.08 -0.04 0.01 0.07 144 1.01
## v_prov[5,2] -0.01 0.00 0.03 -0.04 0.00 0.02 687 1.00
## SRS_prov[1,1] 0.01 0.01 0.09 0.00 0.00 0.02 200 1.01
## SRS_prov[1,2] 0.00 0.00 0.00 0.00 0.00 0.00 2944 1.00
## SRS_prov[2,1] 0.00 0.00 0.00 0.00 0.00 0.00 2944 1.00
## SRS_prov[2,2] 0.00 0.00 0.01 0.00 0.00 0.01 850 1.01
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1900 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:20:13 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Posterior predictive check for mod3_2: densities of 50 replicated datasets
# overlaid on the observed height distribution.
y_rep <- as.matrix(fit.mod3_2, pars = "y_rep")
big_legend <- theme(legend.text = element_text(size = 25),
                    legend.title = element_text(size = 18),
                    axis.text = element_text(size = 18),
                    legend.position = c(0.8, 0.6))
ppc_dens_overlay(y = data$height, yrep = y_rep[1:50, ]) +
  theme_bw() +
  big_legend
Statistical rethinking (first version):
Model m13_6NC (p. 405).
Stan code here: Code model using non-centered parameterization.
# Compile and fit mod3_3: the non-centered parameterization of the model
# (z_* are the standardized latent effects).
# Fix: use `<-` (not `=`) for top-level assignment, per R convention.
mod3_3 <- stan_model("mod3_3.stan")
fit.mod3_3 <- sampling(mod3_3, data = data.list_mod3, iter = 3000, chains = 2,
                       cores = 2,
                       control = list(max_treedepth = 14, adapt_delta = 0.99))
# Posterior summary (10%, 50%, 90% quantiles) for the structural parameters.
print(fit.mod3_3, pars = c("beta_age", "beta_age2",
                           "alpha", "z_alpha_block", "sigma_block",
                           "Rho_prov", "sigma_prov", "z_alpha_prov",
                           "z_beta_prov", "v_prov",
                           "sigma_y"),
      probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod3_3.
## 2 chains, each with iter=3000; warmup=1500; thin=1;
## post-warmup draws per chain=1500, total post-warmup draws=3000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.47 0.00 0.03 0.44 0.47 0.50 1105 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 2963 1.00
## alpha 5.85 0.00 0.08 5.76 5.85 5.93 1212 1.01
## z_alpha_block[1] -0.61 0.02 0.64 -1.44 -0.59 0.20 1506 1.00
## z_alpha_block[2] -0.28 0.01 0.59 -1.05 -0.27 0.46 1540 1.00
## z_alpha_block[3] -0.11 0.01 0.59 -0.88 -0.09 0.62 1535 1.00
## z_alpha_block[4] 1.02 0.02 0.72 0.13 0.99 1.95 1509 1.00
## sigma_block 0.11 0.00 0.09 0.04 0.08 0.20 961 1.00
## Rho_prov[1,1] 1.00 NaN 0.00 1.00 1.00 1.00 NaN NaN
## Rho_prov[1,2] -0.06 0.01 0.32 -0.49 -0.06 0.38 2168 1.00
## Rho_prov[2,1] -0.06 0.01 0.32 -0.49 -0.06 0.38 2168 1.00
## Rho_prov[2,2] 1.00 0.00 0.00 1.00 1.00 1.00 1105 1.00
## sigma_prov[1] 0.08 0.00 0.06 0.03 0.06 0.13 939 1.00
## sigma_prov[2] 0.03 0.00 0.03 0.00 0.02 0.07 814 1.01
## z_alpha_prov[1] 0.80 0.02 0.65 0.01 0.77 1.62 1629 1.00
## z_alpha_prov[2] 0.07 0.02 0.63 -0.73 0.07 0.87 1595 1.00
## z_alpha_prov[3] -1.03 0.02 0.73 -1.97 -1.01 -0.12 1737 1.00
## z_alpha_prov[4] 0.13 0.01 0.68 -0.74 0.13 0.99 2277 1.00
## z_alpha_prov[5] 0.20 0.02 0.65 -0.59 0.19 0.99 1766 1.00
## z_beta_prov[1] -0.37 0.02 0.78 -1.31 -0.39 0.59 2124 1.00
## z_beta_prov[2] -0.05 0.02 0.80 -1.03 -0.08 0.97 2455 1.00
## z_beta_prov[3] 0.26 0.02 0.84 -0.82 0.26 1.32 2423 1.00
## z_beta_prov[4] 0.50 0.02 0.85 -0.58 0.52 1.58 2313 1.00
## z_beta_prov[5] -0.28 0.02 0.81 -1.29 -0.29 0.71 2047 1.00
## v_prov[1,1] 0.80 0.02 0.65 0.01 0.77 1.62 1629 1.00
## v_prov[1,2] -0.37 0.02 0.78 -1.31 -0.39 0.59 2124 1.00
## v_prov[2,1] 0.07 0.02 0.63 -0.73 0.07 0.87 1595 1.00
## v_prov[2,2] -0.05 0.02 0.80 -1.03 -0.08 0.97 2455 1.00
## v_prov[3,1] -1.03 0.02 0.73 -1.97 -1.01 -0.12 1737 1.00
## v_prov[3,2] 0.26 0.02 0.84 -0.82 0.26 1.32 2423 1.00
## v_prov[4,1] 0.13 0.01 0.68 -0.74 0.13 0.99 2277 1.00
## v_prov[4,2] 0.50 0.02 0.85 -0.58 0.52 1.58 2313 1.00
## v_prov[5,1] 0.20 0.02 0.65 -0.59 0.19 0.99 1766 1.00
## v_prov[5,2] -0.28 0.02 0.81 -1.29 -0.29 0.71 2047 1.00
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 3477 1.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:22:03 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
y_rep <- as.matrix(fit.mod3_3, pars = "y_rep")
ppc_dens_overlay(y =data$height,y_rep[1:50, ]) + theme_bw() + theme(legend.text=element_text(size=25),
legend.title=element_text(size=18),
axis.text = element_text(size=18),
legend.position = c(0.8,0.6))
posterior_cp <- as.array(fit.mod3_3)
np_cp <- nuts_params(fit.mod3_3)
mcmc_trace(posterior_cp, pars =c( "alpha","sigma_prov[1]"), np = np_cp) +
xlab("Post-warmup iteration")
## No divergences to plot.
mcmc_pairs(posterior_cp, np = np_cp, pars = c("alpha","beta_age","beta_age2","sigma_y","sigma_prov[1]","sigma_prov[2]"),
off_diag_args = list(size = 1, alpha = 1/3),np_style = pairs_style_np(div_size=3, div_shape = 19))
Comment: I tried \(\alpha \sim \mathcal{N}(0,1)\), chains didn’t mix. Very high R-hat values and lots of divergent transitions.
Sorensen et al. 2016. Listing 8.
# Data list for mod3_4 (Sorensen et al. 2016, Listing 8).
# Element names must match the `data` block of mod3_4.stan — note the Stan
# model expects the block index under the name "bloc".
data.list_mod3_4 <- list(N      = length(data$height),        # number of observations
                         y      = data$height,                # response: sapling height
                         age    = data$age.sc,                # standardized tree age
                         nprov  = length(unique(data$prov)),  # number of provenances
                         nblock = length(unique(data$block)), # number of blocks
                         prov   = as.numeric(data$prov),      # provenance index per tree
                         bloc   = as.numeric(data$block))     # block index per tree
# Fix: use `<-` (not `=`) for top-level assignment, per R convention.
mod3_4 <- stan_model("mod3_4.stan")
fit.mod3_4 <- sampling(mod3_4, data = data.list_mod3_4, iter = 2000, chains = 2,
                       cores = 2,
                       control = list(max_treedepth = 14, adapt_delta = 0.999))
## Warning: There were 1 chains where the estimated Bayesian Fraction of Missing Information was low. See
## http://mc-stan.org/misc/warnings.html#bfmi-low
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: The largest R-hat is 1.48, indicating chains have not mixed.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#r-hat
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#bulk-ess
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## http://mc-stan.org/misc/warnings.html#tail-ess
print(fit.mod3_4, probs = c(0.10, 0.5, 0.9))
## Inference for Stan model: mod3_4.
## 2 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=2000.
##
## mean se_mean sd 10% 50% 90% n_eff Rhat
## beta_age 0.46 0.00 0.03 0.43 0.46 0.50 1310 1.00
## beta_age2 -0.09 0.00 0.02 -0.11 -0.09 -0.07 2033 1.00
## alpha 1.06 0.07 1.27 -0.39 1.01 2.55 312 1.01
## sigma_y 0.40 0.00 0.01 0.38 0.40 0.41 1280 1.00
## sigma_block 3.94 1.15 2.98 0.08 3.89 7.49 7 1.37
## alpha_block[1] 3.62 1.06 2.26 -0.05 4.35 5.98 5 1.81
## alpha_block[2] 3.65 1.06 2.26 -0.03 4.38 6.00 5 1.81
## alpha_block[3] 3.66 1.06 2.26 -0.02 4.40 6.02 5 1.81
## alpha_block[4] 3.76 1.06 2.26 0.07 4.49 6.12 5 1.81
## z_u[1,1] 0.79 0.02 0.57 0.10 0.76 1.49 867 1.00
## z_u[1,2] 0.23 0.17 0.63 -0.62 0.29 0.99 14 1.17
## z_u[1,3] -0.62 0.43 0.99 -1.80 -0.76 0.82 5 1.57
## z_u[1,4] 0.25 0.16 0.71 -0.71 0.34 1.09 19 1.12
## z_u[1,5] 0.34 0.13 0.63 -0.48 0.40 1.10 25 1.10
## z_u[2,1] -0.31 0.02 0.86 -1.40 -0.33 0.81 1358 1.00
## z_u[2,2] -0.02 0.02 0.83 -1.04 -0.02 0.98 1725 1.00
## z_u[2,3] 0.18 0.02 0.85 -0.90 0.18 1.26 1470 1.00
## z_u[2,4] 0.49 0.02 0.90 -0.64 0.49 1.60 1306 1.00
## z_u[2,5] -0.24 0.02 0.87 -1.32 -0.27 0.88 1512 1.00
## sigma_u[1] 1.64 1.61 3.34 0.03 0.08 6.76 4 1.87
## sigma_u[2] 0.03 0.00 0.04 0.00 0.02 0.07 745 1.00
## L_u[1,1] 1.00 NaN 0.00 1.00 1.00 1.00 NaN NaN
## L_u[1,2] 0.00 NaN 0.00 0.00 0.00 0.00 NaN NaN
## L_u[2,1] -0.10 0.01 0.45 -0.69 -0.12 0.55 1439 1.00
## L_u[2,2] 0.88 0.00 0.14 0.68 0.93 1.00 1031 1.00
## u[1,1] 1.17 1.13 2.18 0.01 0.06 5.27 4 2.49
## u[1,2] 1.13 1.13 2.18 -0.04 0.02 5.17 4 2.48
## u[1,3] 1.04 1.12 2.16 -0.13 -0.05 5.10 4 2.48
## u[1,4] 1.13 1.13 2.18 -0.05 0.02 5.22 4 2.49
## u[1,5] 1.14 1.13 2.18 -0.03 0.03 5.20 4 2.49
## u[2,1] -0.01 0.00 0.03 -0.05 -0.01 0.01 1310 1.00
## u[2,2] 0.00 0.00 0.03 -0.03 0.00 0.03 1470 1.00
## u[2,3] 0.01 0.00 0.03 -0.02 0.00 0.04 1490 1.00
## u[2,4] 0.02 0.00 0.04 -0.01 0.01 0.07 1096 1.00
## u[2,5] -0.01 0.00 0.03 -0.05 0.00 0.02 1217 1.00
## y_rep[1] 318.25 3.04 134.14 174.06 292.30 498.06 1943 1.00
## y_rep[2] 218.86 2.14 93.20 118.61 199.17 337.72 1894 1.00
## y_rep[3] 455.50 4.65 193.59 247.34 417.20 705.32 1733 1.00
## y_rep[4] 311.37 2.91 129.55 173.35 288.58 476.31 1981 1.00
## y_rep[5] 214.67 2.01 88.13 120.61 201.69 325.57 1916 1.00
## y_rep[6] 239.08 2.06 95.87 134.31 222.42 367.77 2176 1.00
## y_rep[7] 209.07 2.00 88.86 113.90 190.29 326.28 1975 1.00
## y_rep[8] 309.91 2.82 126.20 175.09 286.47 477.34 2009 1.00
## y_rep[9] 215.69 1.99 90.94 117.42 198.58 342.88 2078 1.00
## y_rep[10] 362.50 3.34 152.40 200.72 332.64 552.16 2085 1.00
## y_rep[11] 454.15 4.29 183.27 251.91 424.80 699.62 1822 1.00
## y_rep[12] 711.57 6.79 291.45 396.26 654.83 1099.31 1844 1.00
## y_rep[13] 326.72 3.13 134.30 180.77 302.84 496.64 1841 1.00
## y_rep[14] 217.71 2.07 91.81 120.00 202.07 331.81 1962 1.00
## y_rep[15] 318.43 2.93 132.39 179.13 295.07 487.76 2040 1.00
## y_rep[16] 523.17 4.90 216.62 292.97 481.92 818.92 1953 1.00
## y_rep[17] 631.54 6.62 283.47 339.77 576.80 977.82 1833 1.00
## y_rep[18] 212.14 2.15 89.63 114.68 195.85 332.47 1740 1.00
## y_rep[19] 241.15 2.31 102.37 133.38 219.97 372.95 1958 1.00
## y_rep[20] 216.18 1.94 89.70 120.74 199.89 330.16 2137 1.00
## y_rep[21] 637.94 5.55 260.28 358.84 587.21 983.72 2203 1.00
## y_rep[22] 356.43 3.28 146.72 197.65 330.83 546.97 1996 1.00
## y_rep[23] 211.59 2.11 86.56 116.42 196.99 322.23 1687 1.00
## y_rep[24] 237.15 2.16 95.77 133.05 222.01 359.25 1964 1.00
## y_rep[25] 330.40 3.11 139.95 182.39 302.63 510.17 2025 1.00
## y_rep[26] 457.22 4.57 191.68 250.95 421.03 710.36 1762 1.00
## y_rep[27] 314.98 3.12 132.87 174.41 289.61 488.96 1810 1.00
## y_rep[28] 471.97 4.28 194.10 263.56 433.49 721.38 2052 1.00
## y_rep[29] 519.05 4.95 211.81 290.69 479.52 798.48 1831 1.00
## y_rep[30] 712.82 6.84 295.75 398.38 653.94 1100.98 1871 1.00
## y_rep[31] 221.30 2.19 92.42 122.27 205.69 340.65 1780 1.00
## y_rep[32] 618.55 5.74 259.15 335.59 575.23 957.40 2041 1.00
## y_rep[33] 214.63 2.06 91.31 117.79 196.40 337.14 1973 1.00
## y_rep[34] 214.67 2.04 89.71 119.72 198.78 328.50 1935 1.00
## y_rep[35] 653.90 6.36 274.94 359.42 604.88 1017.07 1868 1.00
## y_rep[36] 616.71 5.52 253.63 340.31 571.89 952.24 2110 1.00
## y_rep[37] 516.78 5.18 225.58 286.29 465.68 805.23 1898 1.00
## y_rep[38] 308.25 2.93 128.82 172.05 283.24 478.54 1937 1.00
## y_rep[39] 364.50 3.53 153.75 200.16 336.65 565.24 1902 1.00
## y_rep[40] 649.51 6.20 269.22 354.05 604.75 998.51 1888 1.00
## y_rep[41] 466.68 4.19 191.94 259.33 432.81 709.37 2103 1.00
## y_rep[42] 321.80 3.29 134.82 176.83 296.82 496.03 1684 1.00
## y_rep[43] 444.64 4.28 190.06 249.10 405.64 679.43 1976 1.00
## y_rep[44] 209.02 1.91 84.22 114.49 195.14 321.77 1942 1.00
## y_rep[45] 242.34 2.31 104.17 133.66 221.79 376.46 2036 1.00
## y_rep[46] 712.74 6.73 301.36 387.18 666.35 1095.20 2003 1.00
## y_rep[47] 319.54 3.10 129.74 175.87 295.92 489.91 1748 1.00
## y_rep[48] 474.22 4.55 197.90 261.07 441.62 724.91 1888 1.00
## y_rep[49] 324.66 3.01 133.33 178.06 297.83 505.28 1964 1.00
## y_rep[50] 521.45 4.83 220.20 294.83 474.66 815.58 2077 1.00
## y_rep[51] 471.15 4.27 189.43 266.35 441.12 718.32 1965 1.00
## y_rep[52] 617.95 6.51 258.01 346.75 563.05 974.27 1573 1.00
## y_rep[53] 237.95 2.48 100.79 133.82 221.39 364.29 1646 1.00
## y_rep[54] 719.31 6.68 293.74 407.06 661.24 1101.74 1936 1.00
## y_rep[55] 217.48 2.03 90.27 120.43 199.28 338.47 1986 1.00
## y_rep[56] 309.99 2.97 129.91 171.28 285.61 468.27 1909 1.00
## y_rep[57] 215.73 1.92 89.39 119.70 198.82 331.16 2178 1.00
## y_rep[58] 207.62 1.92 85.85 115.94 190.91 320.08 1999 1.00
## y_rep[59] 361.55 3.30 148.74 200.60 334.72 555.76 2035 1.00
## y_rep[60] 648.94 6.27 273.02 360.86 590.87 990.67 1894 1.00
## y_rep[61] 449.22 4.20 189.38 249.65 414.75 696.37 2031 1.00
## y_rep[62] 316.29 2.99 129.15 173.79 294.80 482.27 1868 1.00
## y_rep[63] 319.23 2.90 127.70 178.48 295.61 483.91 1936 1.00
## y_rep[64] 311.50 2.82 127.23 171.48 287.13 479.92 2040 1.00
## y_rep[65] 210.49 2.05 88.79 116.00 192.97 324.55 1871 1.00
## y_rep[66] 647.09 6.21 271.08 355.22 594.25 995.09 1908 1.00
## y_rep[67] 214.21 1.98 88.03 117.61 198.71 321.49 1969 1.00
## y_rep[68] 516.08 4.91 213.50 287.24 483.99 774.03 1893 1.00
## y_rep[69] 709.15 6.52 292.30 392.77 653.89 1102.38 2009 1.00
## y_rep[70] 472.29 4.26 195.91 264.39 435.73 720.39 2113 1.00
## y_rep[71] 447.51 4.01 176.50 253.24 417.30 674.15 1937 1.00
## y_rep[72] 220.39 2.12 91.86 122.15 204.42 334.73 1876 1.00
## y_rep[73] 240.09 2.34 100.05 134.26 218.77 374.36 1832 1.00
## y_rep[74] 359.64 3.45 148.38 198.85 333.74 557.23 1848 1.00
## y_rep[75] 629.18 6.05 266.92 349.28 582.57 951.11 1947 1.00
## y_rep[76] 326.82 3.05 135.72 179.91 299.81 507.91 1974 1.00
## y_rep[77] 448.47 4.17 187.75 246.62 416.19 687.87 2023 1.00
## y_rep[78] 207.71 1.95 84.50 116.33 193.04 317.03 1872 1.00
## y_rep[79] 241.70 2.27 99.70 134.42 222.74 371.65 1922 1.00
## y_rep[80] 310.23 2.77 123.99 176.90 289.91 466.34 2000 1.00
## y_rep[81] 214.74 1.94 86.15 119.72 200.75 323.03 1972 1.00
## y_rep[82] 213.79 2.03 87.36 119.73 197.46 329.42 1857 1.00
## y_rep[83] 530.83 4.91 226.34 290.86 492.00 810.67 2121 1.00
## y_rep[84] 632.15 5.96 272.83 343.43 579.93 987.80 2093 1.00
## y_rep[85] 316.68 2.85 127.07 177.44 295.01 478.60 1994 1.00
## y_rep[86] 476.08 4.36 197.45 263.32 441.55 733.78 2054 1.00
## y_rep[87] 728.54 7.10 313.30 396.89 674.07 1126.64 1946 1.00
## y_rep[88] 359.77 3.41 149.81 199.24 332.23 557.06 1935 1.00
## y_rep[89] 449.68 4.26 189.74 250.67 410.40 697.31 1983 1.00
## y_rep[90] 617.65 5.63 251.25 346.99 571.47 942.13 1993 1.00
## y_rep[91] 244.79 2.33 101.17 135.46 226.26 373.29 1878 1.00
## y_rep[92] 525.73 4.88 217.10 293.04 489.02 807.45 1978 1.00
## y_rep[93] 208.57 1.85 83.43 115.43 193.66 322.93 2024 1.00
## y_rep[94] 219.34 2.11 92.44 120.59 202.12 333.95 1928 1.00
## y_rep[95] 310.79 3.01 131.30 172.17 287.83 477.76 1906 1.00
## y_rep[96] 322.70 3.09 134.48 179.21 297.57 498.75 1898 1.00
## y_rep[97] 720.91 6.92 308.26 391.17 664.63 1115.32 1982 1.00
## y_rep[98] 361.32 3.59 153.96 198.43 331.21 563.28 1838 1.00
## y_rep[99] 215.02 1.89 85.54 123.39 200.53 323.81 2048 1.00
## y_rep[100] 358.83 3.26 146.92 198.98 331.24 551.86 2029 1.00
## y_rep[101] 620.10 5.50 255.61 341.60 572.75 949.87 2158 1.00
## y_rep[102] 472.39 4.46 199.84 264.83 432.95 729.72 2006 1.00
## y_rep[103] 635.02 6.30 263.90 350.27 587.13 979.05 1753 1.00
## y_rep[104] 322.51 2.94 133.43 180.03 302.84 485.64 2059 1.00
## y_rep[105] 207.14 2.00 85.09 115.45 192.81 315.05 1803 1.00
## y_rep[106] 312.33 2.90 129.23 173.56 288.68 489.67 1992 1.00
## y_rep[107] 717.51 6.62 302.23 393.69 658.70 1132.47 2085 1.00
## y_rep[108] 237.23 2.16 98.95 130.37 217.76 370.62 2098 1.00
## y_rep[109] 320.99 2.81 131.06 176.28 297.87 486.59 2175 1.00
## y_rep[110] 454.93 4.28 191.21 250.99 418.06 700.66 1993 1.00
## y_rep[111] 214.87 1.97 88.34 119.23 196.57 329.23 2010 1.00
## y_rep[112] 653.61 6.33 277.84 354.69 601.79 1010.96 1926 1.00
## y_rep[113] 518.77 4.74 210.65 294.37 485.08 780.49 1979 1.01
## y_rep[114] 219.74 2.17 92.34 122.16 204.14 337.15 1808 1.00
## y_rep[115] 462.68 4.70 190.50 258.93 428.45 709.98 1641 1.00
## y_rep[116] 358.43 3.40 150.04 200.14 335.49 551.55 1949 1.00
## y_rep[117] 243.44 2.39 104.64 133.57 222.37 385.13 1923 1.00
## y_rep[118] 321.16 3.26 137.21 177.39 296.34 491.96 1774 1.00
## y_rep[119] 644.41 5.87 257.71 362.14 598.30 987.52 1930 1.00
## y_rep[120] 465.86 4.48 196.56 250.84 432.42 720.90 1928 1.00
## y_rep[121] 520.99 5.06 218.86 290.87 480.41 811.67 1870 1.00
## y_rep[122] 211.56 2.02 89.29 117.76 194.88 330.44 1952 1.00
## y_rep[123] 713.70 6.48 300.65 400.20 648.72 1102.75 2155 1.00
## y_rep[124] 307.58 2.82 128.10 172.53 283.42 470.82 2062 1.00
## y_rep[125] 216.37 2.25 89.45 121.36 200.68 330.42 1583 1.00
## y_rep[126] 218.06 2.13 95.46 118.19 199.19 343.67 2015 1.00
## y_rep[127] 239.23 2.07 93.48 137.51 225.09 362.74 2047 1.00
## y_rep[128] 715.92 6.94 306.49 394.97 662.89 1095.95 1953 1.00
## y_rep[129] 458.43 4.20 189.61 251.32 424.64 704.81 2039 1.00
## y_rep[130] 210.35 2.05 89.06 119.12 191.37 323.05 1886 1.00
## y_rep[131] 321.71 2.93 133.73 177.23 295.67 504.35 2082 1.00
## y_rep[132] 356.11 3.26 152.42 194.29 323.77 549.90 2183 1.00
## y_rep[133] 307.83 2.99 126.93 171.41 282.30 471.23 1797 1.00
## y_rep[134] 323.75 3.06 132.02 179.09 300.82 493.99 1863 1.00
## y_rep[135] 214.05 2.12 90.70 117.91 197.04 326.86 1832 1.00
## y_rep[136] 526.42 5.04 213.68 291.52 489.10 801.27 1797 1.00
## y_rep[137] 646.35 6.03 255.75 363.08 600.31 981.78 1800 1.00
## y_rep[138] 468.02 4.08 190.52 263.67 428.48 726.18 2176 1.00
## y_rep[139] 220.19 2.08 92.24 119.23 202.87 340.23 1964 1.00
## y_rep[140] 639.36 5.93 269.51 353.30 588.10 984.49 2069 1.00
## y_rep[141] 353.11 3.55 147.00 194.54 330.71 532.51 1711 1.00
## y_rep[142] 322.66 3.03 128.70 184.55 299.46 492.17 1810 1.00
## y_rep[143] 639.46 5.90 260.52 365.00 590.07 971.36 1950 1.00
## y_rep[144] 216.57 2.08 92.36 120.32 197.85 335.45 1978 1.00
## y_rep[145] 208.98 1.94 88.63 115.75 191.07 320.79 2095 1.00
## y_rep[146] 320.36 2.78 131.79 178.89 293.06 488.53 2253 1.00
## y_rep[147] 442.54 3.99 179.67 245.17 413.10 676.22 2024 1.00
## y_rep[148] 237.40 2.13 93.87 132.76 221.69 356.01 1934 1.00
## y_rep[149] 724.98 6.51 299.98 399.46 669.43 1114.90 2121 1.00
## y_rep[150] 308.97 2.96 125.80 174.43 286.84 476.14 1805 1.00
## y_rep[151] 467.63 4.35 192.40 253.65 431.49 711.69 1952 1.00
## y_rep[152] 216.53 2.11 90.96 118.89 199.39 334.61 1860 1.00
## y_rep[153] 613.60 5.64 245.00 341.33 571.54 945.48 1886 1.00
## y_rep[154] 520.53 5.01 218.62 281.65 479.65 820.23 1906 1.00
## y_rep[155] 362.97 3.54 151.78 199.07 333.80 565.05 1834 1.00
## y_rep[156] 215.42 2.01 88.79 118.95 199.21 333.39 1956 1.00
## y_rep[157] 659.82 6.14 278.78 354.77 612.60 999.53 2061 1.00
## y_rep[158] 213.76 2.09 90.10 120.10 196.72 331.23 1850 1.00
## y_rep[159] 722.70 7.05 297.43 405.94 672.20 1112.92 1779 1.00
## y_rep[160] 310.10 2.84 128.76 170.56 285.47 483.11 2055 1.00
## y_rep[161] 458.09 4.25 190.55 255.23 423.27 703.73 2008 1.00
## y_rep[162] 475.98 4.65 204.63 259.72 436.37 738.89 1936 1.00
## y_rep[163] 241.41 2.24 99.37 131.92 223.85 368.29 1975 1.00
## y_rep[164] 620.05 5.53 257.35 345.14 570.38 964.76 2169 1.00
## y_rep[165] 527.89 5.27 221.90 291.06 486.59 812.98 1772 1.00
## y_rep[166] 322.56 2.83 129.15 182.54 300.28 490.96 2077 1.00
## y_rep[167] 211.34 2.12 87.55 118.02 194.95 329.03 1700 1.00
## y_rep[168] 312.77 2.86 130.68 170.19 289.07 478.97 2088 1.00
## y_rep[169] 209.08 1.99 84.54 114.27 194.58 323.63 1798 1.00
## y_rep[170] 219.68 1.96 89.02 118.72 204.96 333.57 2060 1.00
## y_rep[171] 641.09 5.87 262.82 360.10 594.73 966.73 2002 1.00
## y_rep[172] 635.18 5.77 254.37 354.24 590.83 973.13 1944 1.00
## y_rep[173] 214.88 1.92 86.71 121.18 200.44 326.04 2043 1.00
## y_rep[174] 240.05 2.24 96.78 133.76 222.93 366.74 1874 1.00
## y_rep[175] 468.40 4.24 189.51 257.56 431.68 715.76 1995 1.00
## y_rep[176] 319.38 3.09 130.54 179.51 294.45 486.78 1789 1.00
## y_rep[177] 305.82 2.72 122.12 172.12 286.03 465.16 2017 1.00
## y_rep[178] 319.84 3.13 138.36 176.67 292.98 495.06 1949 1.00
## y_rep[179] 469.43 4.34 193.61 266.77 429.89 721.73 1987 1.00
## y_rep[180] 361.21 3.54 156.15 198.74 329.89 555.92 1946 1.00
## y_rep[181] 215.58 2.07 90.14 116.10 199.80 330.62 1900 1.00
## y_rep[182] 657.78 6.65 275.90 371.31 601.48 1005.69 1723 1.00
## y_rep[183] 477.21 4.38 194.19 265.46 441.48 732.20 1967 1.00
## y_rep[184] 218.74 2.10 92.37 119.29 198.81 341.07 1930 1.00
## y_rep[185] 237.58 2.12 95.77 130.74 220.63 360.76 2046 1.00
## y_rep[186] 324.27 3.09 134.28 180.31 299.42 502.26 1884 1.00
## y_rep[187] 208.81 1.92 84.02 117.73 194.18 313.29 1919 1.00
## y_rep[188] 358.20 3.27 147.59 197.51 330.93 549.53 2033 1.00
## y_rep[189] 239.97 2.15 101.07 130.22 221.96 372.82 2216 1.00
## y_rep[190] 207.85 1.93 87.70 113.76 193.34 317.79 2057 1.00
## y_rep[191] 657.61 6.22 275.20 364.44 608.92 1025.74 1957 1.00
## y_rep[192] 319.80 2.76 127.65 181.16 299.74 487.02 2143 1.00
## y_rep[193] 311.34 2.81 124.23 177.75 285.48 472.72 1958 1.00
## y_rep[194] 635.95 6.27 266.91 350.74 597.96 976.23 1812 1.00
## y_rep[195] 218.44 2.25 95.39 119.70 199.64 337.34 1792 1.00
## y_rep[196] 320.44 3.01 132.78 175.62 294.07 497.67 1940 1.00
## y_rep[197] 215.77 2.09 93.71 118.53 198.37 328.18 2020 1.00
## y_rep[198] 468.21 4.28 192.19 258.65 437.01 715.73 2017 1.00
## y_rep[199] 723.95 7.02 306.12 392.97 665.33 1130.46 1902 1.00
## y_rep[200] 515.63 4.91 213.65 282.05 473.65 799.72 1896 1.00
## y_rep[201] 470.40 4.55 196.02 260.91 433.39 729.99 1858 1.00
## y_rep[202] 352.93 3.39 145.32 197.42 324.65 540.73 1842 1.00
## y_rep[203] 324.89 3.12 136.59 180.18 298.66 500.49 1919 1.00
## y_rep[204] 451.95 4.67 192.49 253.62 418.24 682.54 1701 1.00
## y_rep[205] 236.54 2.45 97.98 130.08 220.47 356.70 1596 1.00
## y_rep[206] 463.01 4.29 189.13 254.93 429.54 722.57 1945 1.00
## y_rep[207] 355.09 3.34 147.01 193.90 329.43 550.12 1935 1.00
## y_rep[208] 216.45 2.04 88.39 122.20 200.10 330.08 1872 1.00
## y_rep[209] 311.54 2.94 129.29 172.18 284.85 487.63 1930 1.00
## y_rep[210] 645.33 6.03 260.37 354.55 605.79 995.49 1863 1.00
## y_rep[211] 637.64 6.04 269.47 350.40 591.70 969.49 1987 1.00
## y_rep[212] 514.05 4.64 206.89 282.53 484.13 778.24 1988 1.00
## y_rep[213] 220.43 2.06 89.00 123.21 204.45 336.74 1869 1.00
## y_rep[214] 476.42 4.52 199.89 264.71 442.41 727.93 1956 1.00
## y_rep[215] 207.07 1.93 85.14 116.40 189.38 322.52 1955 1.00
## y_rep[216] 323.61 3.52 135.86 180.05 297.14 502.60 1491 1.00
## y_rep[217] 627.64 5.65 256.62 352.35 584.24 952.04 2064 1.00
## y_rep[218] 619.46 5.98 261.45 343.13 568.43 944.18 1910 1.00
## y_rep[219] 206.86 1.90 85.62 113.38 191.57 321.57 2030 1.00
## y_rep[220] 466.51 4.33 198.12 247.87 426.10 716.28 2090 1.00
## y_rep[221] 323.68 3.10 136.58 186.38 296.30 504.64 1947 1.00
## y_rep[222] 220.11 2.10 92.22 123.26 203.41 331.87 1937 1.00
## y_rep[223] 308.43 2.89 127.60 171.15 284.70 471.53 1947 1.00
## y_rep[224] 357.41 3.36 143.82 204.03 332.38 542.85 1834 1.00
## y_rep[225] 459.55 4.34 194.10 257.15 420.02 710.36 2004 1.00
## y_rep[226] 215.19 2.07 92.37 118.60 197.66 333.29 1992 1.00
## y_rep[227] 239.92 2.32 102.92 129.18 219.47 378.98 1962 1.00
## y_rep[228] 468.56 4.27 187.69 260.96 440.41 719.06 1929 1.00
## y_rep[229] 638.33 6.12 261.65 356.85 587.83 975.82 1828 1.00
## y_rep[230] 325.69 2.96 134.41 180.59 301.08 500.28 2063 1.00
## y_rep[231] 650.89 6.42 268.29 356.27 606.53 974.62 1748 1.00
## y_rep[232] 320.90 3.10 132.85 177.53 296.41 495.18 1836 1.00
## y_rep[233] 521.48 5.17 218.37 286.12 479.41 810.03 1782 1.00
## y_rep[234] 206.77 1.88 85.01 112.72 192.13 314.38 2054 1.00
## y_rep[235] 215.08 2.00 90.80 117.85 198.84 331.78 2057 1.00
## y_rep[236] 466.20 4.32 193.42 260.04 427.56 720.57 2004 1.00
## y_rep[237] 216.95 2.02 90.22 119.37 198.95 336.29 2003 1.00
## y_rep[238] 240.73 2.14 99.10 134.47 223.36 371.40 2142 1.00
## y_rep[239] 309.67 2.81 124.16 169.94 289.02 479.42 1955 1.00
## y_rep[240] 359.84 3.68 157.39 197.11 326.85 566.05 1834 1.00
## y_rep[241] 615.81 5.73 252.73 342.32 571.48 942.27 1942 1.00
## y_rep[242] 449.98 4.07 182.16 252.36 416.35 707.51 2002 1.00
## y_rep[243] 317.23 2.87 127.61 178.22 294.51 486.04 1973 1.00
## y_rep[244] 642.54 6.14 274.40 357.79 590.51 976.74 1999 1.00
## y_rep[245] 716.67 7.05 300.05 409.04 652.95 1128.74 1810 1.00
## y_rep[246] 516.46 4.85 205.07 290.71 480.83 788.63 1785 1.00
## y_rep[247] 323.45 3.08 134.05 172.99 299.85 501.80 1897 1.00
## y_rep[248] 728.10 6.97 307.86 395.64 676.19 1131.30 1950 1.00
## y_rep[249] 215.34 1.98 88.58 121.39 198.65 330.88 2010 1.00
## y_rep[250] 650.22 6.52 274.87 354.25 601.08 1002.10 1777 1.00
## y_rep[251] 208.81 2.00 87.09 116.02 192.01 320.30 1902 1.00
## y_rep[252] 329.08 3.11 138.09 176.16 305.75 509.75 1968 1.00
## y_rep[253] 472.54 4.55 202.98 259.19 436.77 722.75 1993 1.00
## y_rep[254] 469.16 4.64 199.82 256.27 432.63 727.44 1857 1.00
## y_rep[255] 303.88 2.83 126.20 167.42 281.45 466.33 1985 1.00
## y_rep[256] 221.17 2.09 92.17 122.57 203.48 340.17 1954 1.00
## y_rep[257] 633.22 6.33 266.98 341.98 582.07 971.24 1780 1.00
## y_rep[258] 622.38 6.13 268.81 341.02 567.22 967.04 1921 1.00
## y_rep[259] 241.87 2.34 103.01 134.28 221.73 369.82 1932 1.00
## y_rep[260] 455.71 4.17 185.21 254.75 424.69 690.35 1975 1.00
## y_rep[261] 356.93 3.23 146.16 202.98 330.31 549.88 2046 1.00
## y_rep[262] 469.19 4.92 195.86 263.44 432.70 724.38 1583 1.00
## y_rep[263] 642.78 6.14 274.46 354.14 588.30 1000.94 2000 1.00
## y_rep[264] 205.33 1.99 86.39 115.94 187.08 316.90 1888 1.00
## y_rep[265] 359.18 4.15 152.50 195.04 330.34 553.76 1351 1.00
## y_rep[266] 210.52 2.04 82.99 118.70 197.67 317.36 1647 1.00
## y_rep[267] 474.85 4.41 203.93 263.45 435.97 734.15 2134 1.00
## y_rep[268] 527.85 4.73 214.27 291.03 491.29 808.28 2055 1.00
## y_rep[269] 309.36 2.95 125.02 169.20 287.84 474.94 1790 1.00
## y_rep[270] 243.92 2.24 102.07 134.62 225.02 370.73 2070 1.00
## y_rep[271] 717.46 6.82 291.36 396.50 669.95 1089.67 1823 1.00
## y_rep[272] 217.36 2.13 91.69 119.29 200.87 332.85 1858 1.00
## y_rep[273] 649.39 6.30 266.12 360.41 599.37 992.05 1784 1.00
## y_rep[274] 314.87 2.87 129.23 177.24 292.86 479.45 2034 1.00
## y_rep[275] 325.17 3.16 134.52 181.12 303.46 504.34 1817 1.00
## y_rep[276] 209.88 2.07 84.99 119.01 194.81 322.22 1693 1.00
## y_rep[277] 358.07 3.53 148.78 199.30 333.16 548.23 1777 1.00
## y_rep[278] 238.51 2.18 97.98 134.91 219.15 366.01 2018 1.00
## y_rep[279] 620.76 5.87 260.91 345.26 571.41 950.71 1972 1.00
## y_rep[280] 217.06 2.01 92.10 118.51 200.87 338.54 2098 1.00
## y_rep[281] 311.08 3.23 129.84 174.59 283.46 485.30 1612 1.00
## y_rep[282] 327.27 3.05 134.96 180.23 301.90 510.03 1962 1.00
## y_rep[283] 322.51 3.08 137.23 176.82 294.17 511.09 1991 1.00
## y_rep[284] 451.68 4.18 189.38 246.41 415.71 695.73 2050 1.00
## y_rep[285] 214.72 2.09 90.77 119.33 197.09 337.21 1879 1.00
## y_rep[286] 718.75 6.99 305.25 398.78 656.69 1115.95 1905 1.00
## y_rep[287] 522.81 4.64 208.76 296.61 484.49 795.56 2024 1.00
## y_rep[288] 458.67 4.42 188.46 257.81 427.94 700.41 1819 1.00
## y_rep[289] 467.52 4.30 194.43 255.14 430.91 712.80 2041 1.00
## y_rep[290] 218.10 1.99 89.21 121.85 202.22 329.75 2001 1.00
## y_rep[291] 472.58 4.43 194.85 261.40 435.25 740.07 1938 1.00
## y_rep[292] 618.30 5.71 256.65 340.91 570.30 958.73 2022 1.00
## y_rep[293] 640.25 6.16 269.24 352.88 591.86 991.10 1909 1.00
## y_rep[294] 210.89 1.97 88.99 117.10 193.94 327.87 2036 1.00
## y_rep[295] 324.64 3.15 134.52 181.15 298.55 493.59 1824 1.00
## y_rep[296] 311.35 2.88 130.93 170.70 287.95 478.88 2073 1.00
## y_rep[297] 213.07 1.91 88.82 118.70 197.90 329.93 2159 1.00
## y_rep[298] 512.28 4.37 201.93 289.44 478.27 774.98 2138 1.00
## y_rep[299] 647.51 5.94 275.51 364.76 599.28 984.70 2149 1.00
## y_rep[300] 318.13 2.89 129.02 174.79 297.03 486.54 1986 1.00
## y_rep[301] 703.86 6.67 287.59 395.78 654.77 1071.71 1859 1.00
## y_rep[302] 355.01 3.54 148.74 199.15 322.05 546.69 1764 1.00
## y_rep[303] 242.57 2.36 101.32 132.81 225.13 373.58 1841 1.00
## y_rep[304] 199.82 1.93 84.78 111.66 184.53 303.43 1923 1.00
## y_rep[305] 304.59 2.93 126.38 169.28 281.14 462.44 1866 1.00
## y_rep[306] 203.02 1.83 81.73 114.96 186.98 307.80 2004 1.00
## y_rep[307] 295.25 2.75 118.55 167.70 273.88 448.88 1859 1.00
## y_rep[308] 226.21 1.98 91.00 125.57 211.86 341.98 2116 1.00
## y_rep[309] 307.42 2.90 126.27 173.94 285.16 472.36 1890 1.00
## y_rep[310] 451.19 4.96 186.29 249.41 420.33 687.48 1411 1.00
## y_rep[311] 196.21 1.81 83.82 110.67 179.73 298.44 2140 1.00
## y_rep[312] 308.03 2.84 126.91 172.37 285.31 469.91 2000 1.00
## y_rep[313] 225.46 2.24 96.05 122.80 207.43 346.62 1831 1.00
## y_rep[314] 452.02 4.23 189.40 254.33 414.34 697.38 2007 1.00
## y_rep[315] 206.29 1.79 82.84 117.46 191.16 314.46 2154 1.00
## y_rep[316] 197.87 1.82 81.08 109.39 182.38 302.23 1983 1.00
## y_rep[317] 441.50 4.04 183.17 247.83 403.89 675.75 2058 1.00
## y_rep[318] 204.05 2.02 85.98 112.13 187.85 316.27 1813 1.00
## y_rep[319] 300.83 2.75 124.64 171.92 276.86 459.02 2052 1.00
## y_rep[320] 291.72 2.70 118.63 160.35 269.62 445.20 1925 1.00
## y_rep[321] 636.39 6.16 272.05 345.79 583.99 982.35 1949 1.00
## y_rep[322] 603.14 5.85 260.32 335.49 552.64 930.41 1983 1.00
## y_rep[323] 205.04 2.02 83.85 113.88 189.76 314.93 1726 1.00
## y_rep[324] 689.86 6.65 280.07 383.75 640.67 1052.79 1772 1.00
## y_rep[325] 206.50 1.88 84.90 114.65 191.45 313.72 2044 1.00
## y_rep[326] 225.77 2.17 93.30 124.01 208.64 347.49 1850 1.00
## y_rep[327] 290.36 2.69 121.28 159.91 264.33 454.58 2030 1.00
## y_rep[328] 307.09 2.80 124.06 173.47 285.97 475.21 1960 1.00
## y_rep[329] 306.07 2.98 131.65 166.95 284.03 471.61 1956 1.00
## y_rep[330] 445.65 4.33 182.91 249.02 413.83 685.10 1781 1.00
## y_rep[331] 619.94 5.61 256.49 334.94 573.46 949.76 2090 1.00
## y_rep[332] 195.25 1.81 80.56 104.37 179.71 300.02 1985 1.00
## y_rep[333] 346.18 3.40 154.71 185.94 319.04 535.44 2070 1.00
## y_rep[334] 499.13 4.91 216.54 275.13 454.07 775.92 1945 1.00
## y_rep[335] 499.73 4.65 206.29 280.58 463.26 755.81 1969 1.00
## y_rep[336] 630.77 6.19 269.73 343.84 585.38 976.58 1902 1.00
## y_rep[337] 710.35 6.52 291.49 385.75 659.87 1095.89 2001 1.00
## y_rep[338] 205.52 1.91 84.00 116.74 189.21 315.11 1928 1.00
## y_rep[339] 312.14 2.96 133.08 172.48 287.92 486.31 2024 1.00
## y_rep[340] 196.07 1.89 81.80 109.54 182.08 297.47 1869 1.00
## y_rep[341] 201.41 1.86 84.56 111.22 185.36 309.35 2061 1.00
## y_rep[342] 228.24 2.10 95.10 127.03 209.95 350.65 2048 1.00
## y_rep[343] 343.63 3.20 142.24 190.14 322.15 525.20 1978 1.00
## y_rep[344] 455.88 4.09 183.95 255.20 422.23 695.52 2020 1.00
## y_rep[345] 196.57 1.82 82.76 108.16 181.06 303.13 2062 1.00
## y_rep[346] 299.21 2.89 127.59 164.80 273.67 460.96 1943 1.00
## y_rep[347] 199.97 1.82 79.03 112.43 187.10 303.61 1886 1.00
## y_rep[348] 305.84 2.89 125.08 167.98 283.22 475.23 1872 1.00
## y_rep[349] 635.17 6.16 266.93 348.97 580.65 976.05 1879 1.00
## y_rep[350] 293.65 2.73 122.59 163.28 270.83 448.94 2023 1.00
## y_rep[351] 456.17 4.28 184.76 249.82 426.06 695.84 1864 1.00
## y_rep[352] 497.35 4.79 206.55 278.03 459.21 769.48 1859 1.00
## y_rep[353] 342.32 3.25 143.40 189.66 314.46 539.07 1952 1.00
## y_rep[354] 201.40 1.87 84.65 112.14 187.78 303.62 2050 1.00
## y_rep[355] 702.59 6.83 292.81 390.05 648.91 1081.09 1836 1.00
## y_rep[356] 618.94 5.71 250.95 340.91 573.88 956.58 1931 1.00
## y_rep[357] 227.39 2.17 96.31 123.14 208.45 356.41 1966 1.00
## y_rep[358] 451.06 4.40 193.13 243.52 414.66 698.51 1924 1.00
## y_rep[359] 632.64 5.79 256.55 356.67 592.82 951.82 1966 1.00
## y_rep[360] 445.02 4.31 189.71 243.98 406.30 687.20 1937 1.00
## y_rep[361] 224.66 2.14 91.58 127.77 208.22 351.12 1829 1.00
## y_rep[362] 506.89 4.80 213.35 281.95 467.89 781.22 1974 1.00
## y_rep[363] 307.39 3.14 128.92 170.53 281.80 474.40 1691 1.00
## y_rep[364] 203.11 1.83 80.90 113.26 189.58 318.71 1959 1.00
## y_rep[365] 290.17 2.63 121.18 165.24 272.10 439.40 2122 1.00
## y_rep[366] 202.55 1.96 84.64 113.17 187.55 314.58 1862 1.00
## y_rep[367] 695.00 6.84 303.60 378.02 640.69 1075.40 1971 1.00
## y_rep[368] 626.80 5.73 259.09 348.42 580.49 954.11 2043 1.00
## y_rep[369] 196.61 1.81 79.45 111.75 181.80 299.03 1934 1.00
## y_rep[370] 342.16 3.44 150.60 187.67 313.78 537.02 1916 1.00
## y_rep[371] 450.70 4.03 179.03 254.28 421.25 684.97 1973 1.00
## y_rep[372] 305.99 2.83 125.75 170.69 286.21 467.94 1976 1.00
## y_rep[373] 207.78 1.94 86.98 113.23 191.18 323.11 2019 1.00
## y_rep[374] 628.90 5.87 257.13 357.00 584.07 952.95 1919 1.00
## y_rep[375] 604.20 5.54 251.48 336.74 557.28 949.72 2063 1.00
## y_rep[376] 205.26 1.95 85.12 112.62 189.70 315.79 1910 1.00
## y_rep[377] 684.03 6.48 280.93 388.42 621.79 1064.53 1879 1.00
## y_rep[378] 196.82 1.82 82.65 108.95 181.82 301.74 2053 1.00
## y_rep[379] 461.93 4.51 194.54 256.27 423.74 721.53 1859 1.00
## y_rep[380] 295.22 2.84 124.84 161.01 269.67 450.92 1935 1.00
## y_rep[381] 311.49 2.89 131.18 170.14 287.80 483.20 2061 1.00
## y_rep[382] 503.17 5.02 211.24 284.04 467.01 759.20 1767 1.00
## y_rep[383] 439.86 4.13 185.33 241.98 407.32 674.42 2018 1.00
## y_rep[384] 336.47 3.16 140.42 186.19 310.29 524.49 1981 1.00
## y_rep[385] 227.53 2.15 94.53 125.14 210.80 352.95 1941 1.00
## y_rep[386] 301.60 3.06 124.98 168.35 277.71 463.98 1668 1.00
## y_rep[387] 293.46 2.93 123.06 159.66 270.67 456.53 1761 1.00
## y_rep[388] 228.85 2.11 97.87 125.28 213.04 354.90 2147 1.00
## y_rep[389] 603.68 5.73 247.47 344.41 552.48 919.73 1864 1.00
## y_rep[390] 619.90 5.76 252.41 342.25 578.90 948.47 1919 1.00
## y_rep[391] 494.66 4.69 200.37 273.98 461.17 756.21 1826 1.00
## y_rep[392] 307.25 2.80 124.69 171.13 282.38 463.91 1985 1.00
## y_rep[393] 452.54 4.42 190.23 244.67 419.53 703.76 1853 1.00
## y_rep[394] 441.63 4.07 186.80 244.31 407.59 667.64 2102 1.00
## y_rep[395] 203.78 1.95 85.92 112.23 188.98 316.34 1940 1.00
## y_rep[396] 196.62 1.94 82.97 109.05 180.88 301.58 1831 1.00
## y_rep[397] 205.09 1.96 85.82 114.09 190.31 314.07 1912 1.00
## y_rep[398] 346.85 3.49 142.29 186.70 326.76 534.88 1664 1.00
## y_rep[399] 225.69 2.11 93.35 124.98 210.41 344.88 1954 1.00
## y_rep[400] 194.84 1.80 78.83 109.38 180.81 300.10 1912 1.00
## y_rep[401] 699.35 6.60 293.95 383.99 652.38 1071.51 1984 1.00
## y_rep[402] 200.89 2.10 85.07 109.22 185.59 306.04 1636 1.00
## y_rep[403] 496.86 4.58 207.14 276.24 459.85 764.37 2046 1.00
## y_rep[404] 205.67 1.93 85.75 113.28 190.77 313.25 1971 1.00
## y_rep[405] 439.53 4.13 182.55 242.06 408.26 665.88 1958 1.00
## y_rep[406] 593.61 5.37 240.93 330.21 550.30 908.19 2013 1.00
## y_rep[407] 336.88 3.20 139.06 188.93 311.48 505.76 1892 1.00
## y_rep[408] 297.05 2.84 125.84 164.12 272.63 452.45 1969 1.00
## y_rep[409] 302.54 2.76 121.82 169.15 281.91 463.44 1950 1.00
## y_rep[410] 308.62 3.07 129.92 171.26 284.36 470.01 1796 1.00
## y_rep[411] 197.01 1.75 78.49 110.84 182.93 299.60 2002 1.00
## y_rep[412] 618.34 5.74 248.92 347.87 574.09 951.25 1878 1.00
## y_rep[413] 304.21 2.74 125.27 169.59 279.47 469.50 2083 1.00
## y_rep[414] 451.53 4.33 186.45 247.54 417.82 698.51 1852 1.00
## y_rep[415] 224.89 2.17 92.59 124.56 209.09 344.92 1826 1.00
## y_rep[416] 335.46 3.40 136.78 185.64 309.52 514.13 1622 1.00
## y_rep[417] 292.31 2.69 119.46 161.52 269.70 447.14 1974 1.00
## y_rep[418] 630.25 6.03 261.61 362.62 585.00 949.28 1884 1.00
## y_rep[419] 202.95 1.95 84.53 110.98 189.56 311.45 1883 1.00
## y_rep[420] 457.41 4.11 187.29 254.41 425.65 696.91 2076 1.00
## y_rep[421] 203.76 2.01 83.22 112.50 189.94 316.49 1712 1.00
## y_rep[422] 342.27 3.34 145.17 188.89 318.06 519.56 1894 1.00
## y_rep[423] 453.42 4.17 184.80 252.79 421.44 699.70 1963 1.00
## y_rep[424] 308.06 2.98 128.89 172.82 283.29 479.73 1871 1.00
## y_rep[425] 291.08 2.63 116.78 164.03 270.27 450.47 1970 1.00
## y_rep[426] 208.79 2.05 88.73 114.01 192.72 320.86 1873 1.00
## y_rep[427] 501.20 4.70 205.67 281.61 468.88 757.66 1913 1.00
## y_rep[428] 304.15 2.67 125.15 171.81 280.25 469.36 2198 1.00
## y_rep[429] 225.73 2.17 92.55 127.12 206.78 348.46 1820 1.00
## y_rep[430] 696.85 5.93 283.30 388.66 647.90 1061.50 2280 1.00
## y_rep[431] 195.85 1.78 81.35 111.28 181.65 304.19 2088 1.00
## y_rep[432] 451.39 4.33 193.31 251.54 415.62 688.43 1991 1.00
## y_rep[433] 626.80 5.97 258.28 347.62 580.78 965.33 1872 1.00
## y_rep[434] 204.28 1.98 87.69 111.43 188.66 313.01 1963 1.00
## y_rep[435] 620.92 5.84 256.81 340.43 574.30 957.84 1931 1.00
## y_rep[436] 295.45 2.90 123.98 166.07 270.64 458.13 1830 1.00
## y_rep[437] 500.53 4.88 214.39 272.33 457.30 781.60 1933 1.00
## y_rep[438] 197.29 1.87 82.22 108.36 183.66 298.06 1935 1.00
## y_rep[439] 228.13 2.15 94.51 126.34 211.05 353.01 1924 1.00
## y_rep[440] 201.88 1.91 84.55 111.81 185.69 312.09 1969 1.00
## y_rep[441] 691.58 6.48 284.43 386.31 638.50 1058.11 1926 1.00
## y_rep[442] 343.23 3.32 150.51 190.27 312.21 528.12 2057 1.00
## y_rep[443] 206.03 1.81 81.97 113.31 190.89 311.30 2048 1.00
## y_rep[444] 228.02 2.15 93.43 125.31 210.64 357.25 1887 1.00
## y_rep[445] 605.63 5.53 252.69 332.73 561.97 945.38 2087 1.00
## y_rep[446] 628.06 6.04 262.92 351.94 573.78 958.36 1895 1.00
## y_rep[447] 429.34 3.77 174.84 235.82 401.20 656.91 2152 1.00
## y_rep[448] 688.19 6.55 282.50 378.82 638.97 1036.67 1862 1.00
## y_rep[449] 500.72 4.69 206.41 282.09 462.99 760.45 1937 1.00
## y_rep[450] 291.62 2.69 117.88 165.79 270.76 444.90 1924 1.00
## y_rep[451] 305.06 2.85 124.96 170.32 283.68 466.81 1927 1.00
## y_rep[452] 457.55 4.40 193.35 255.63 419.09 710.47 1930 1.00
## y_rep[453] 207.88 1.86 84.15 117.33 192.19 319.60 2048 1.00
## y_rep[454] 300.63 2.86 127.47 163.12 273.63 469.81 1984 1.00
## y_rep[455] 202.41 1.89 84.08 110.30 188.54 308.44 1972 1.00
## y_rep[456] 199.11 1.97 80.35 110.23 184.35 307.64 1667 1.00
## y_rep[457] 334.28 2.94 134.94 186.35 308.48 513.79 2100 1.00
## y_rep[458] 446.62 4.21 186.34 251.22 414.62 670.14 1964 1.00
## y_rep[459] 198.10 2.03 81.42 107.60 184.99 307.85 1610 1.00
## y_rep[460] 207.86 2.03 88.76 113.52 193.05 320.89 1909 1.00
## y_rep[461] 298.88 2.78 125.90 163.76 275.95 458.30 2056 1.00
## y_rep[462] 225.45 2.09 90.67 126.56 208.73 343.43 1877 1.00
## y_rep[463] 203.86 1.86 85.19 112.83 186.78 319.33 2107 1.00
## y_rep[464] 304.90 2.84 125.65 171.32 282.61 465.55 1953 1.00
## y_rep[465] 203.43 1.96 87.36 109.32 186.98 318.28 1994 1.00
## y_rep[466] 195.53 1.81 83.67 109.59 181.97 298.80 2128 1.00
## y_rep[467] 228.04 2.12 93.41 124.95 211.77 353.02 1939 1.00
## y_rep[468] 500.95 4.62 207.83 276.31 463.06 771.19 2025 1.00
## y_rep[469] 311.16 2.86 132.13 168.35 285.18 485.54 2127 1.00
## y_rep[470] 298.49 2.86 124.83 166.22 273.57 463.36 1900 1.00
## y_rep[471] 342.27 3.25 144.82 186.98 316.56 531.90 1985 1.00
## y_rep[472] 697.38 6.64 292.14 385.69 640.43 1084.23 1938 1.00
## y_rep[473] 638.88 6.03 260.88 355.56 590.69 987.66 1873 1.00
## y_rep[474] 455.39 4.13 190.03 249.89 422.55 702.02 2119 1.00
## y_rep[475] 206.39 1.90 86.87 113.95 189.02 321.89 2088 1.00
## y_rep[476] 451.51 4.21 184.23 252.75 416.71 687.10 1917 1.00
## y_rep[477] 623.15 5.84 267.61 345.55 568.82 975.82 2101 1.00
## y_rep[478] 694.52 6.42 290.45 382.36 637.39 1089.38 2049 1.00
## y_rep[479] 208.18 2.11 89.96 115.12 190.94 318.73 1814 1.00
## y_rep[480] 341.50 3.10 140.24 189.81 314.03 530.49 2050 1.00
## y_rep[481] 228.48 2.09 94.87 127.43 211.57 354.23 2055 1.00
## y_rep[482] 205.72 1.92 86.10 115.50 190.13 314.78 2020 1.00
## y_rep[483] 195.79 1.80 81.56 106.82 181.30 302.33 2047 1.00
## y_rep[484] 306.38 2.72 124.66 171.42 284.00 466.83 2097 1.00
## y_rep[485] 495.49 4.55 202.85 277.49 461.44 754.35 1989 1.00
## y_rep[486] 306.17 3.20 131.10 168.48 283.28 466.38 1681 1.00
## y_rep[487] 629.22 6.10 268.79 343.10 581.11 974.60 1944 1.00
## y_rep[488] 225.72 2.32 97.08 125.34 204.89 349.78 1747 1.00
## y_rep[489] 451.78 4.43 185.08 249.71 418.55 694.21 1748 1.00
## y_rep[490] 339.04 3.09 138.35 184.80 314.96 526.05 2002 1.00
## y_rep[491] 452.07 4.16 181.01 257.91 416.54 687.35 1897 1.00
## y_rep[492] 494.61 4.67 204.91 274.28 451.87 759.98 1926 1.00
## y_rep[493] 203.24 1.95 84.95 111.94 188.82 313.30 1902 1.00
## y_rep[494] 194.02 1.79 79.01 108.58 179.55 295.15 1946 1.00
## y_rep[495] 291.80 2.74 123.28 162.99 266.56 454.53 2024 1.00
## y_rep[496] 694.53 6.56 287.98 384.19 643.80 1068.25 1926 1.00
## y_rep[497] 203.70 1.99 85.81 113.03 189.64 312.73 1864 1.00
## y_rep[498] 621.09 5.60 251.98 347.04 569.58 956.75 2028 1.00
## y_rep[499] 308.26 3.00 128.79 168.23 285.40 476.19 1848 1.00
## y_rep[500] 294.73 2.85 124.14 162.05 276.06 455.11 1900 1.00
## y_rep[501] 204.40 2.02 86.78 116.59 184.83 315.94 1851 1.00
## y_rep[502] 696.65 6.87 292.78 381.49 643.53 1070.84 1818 1.00
## y_rep[503] 498.59 4.68 211.95 283.87 449.96 770.03 2048 1.00
## y_rep[504] 452.21 4.44 184.88 252.21 419.61 686.24 1737 1.00
## y_rep[505] 621.16 5.81 254.87 340.24 571.51 965.64 1923 1.00
## y_rep[506] 202.74 1.94 83.81 112.76 186.26 311.55 1867 1.00
## y_rep[507] 339.38 3.21 140.60 189.64 313.57 522.20 1923 1.00
## y_rep[508] 226.78 2.14 94.71 126.81 208.60 347.09 1953 1.00
## y_rep[509] 308.81 2.89 126.17 169.75 283.53 483.66 1902 1.00
## y_rep[510] 304.73 2.76 123.13 170.18 284.28 466.19 1983 1.00
## y_rep[511] 194.77 1.76 78.64 109.64 181.65 297.39 2008 1.00
## y_rep[512] 635.53 5.93 261.79 351.65 589.13 985.94 1952 1.00
## y_rep[513] 207.48 1.87 84.08 116.69 190.28 316.67 2014 1.00
## y_rep[514] 314.15 2.93 131.41 172.79 290.25 484.19 2012 1.00
## y_rep[515] 283.08 2.42 111.36 156.82 264.25 434.55 2115 1.00
## y_rep[516] 585.49 5.48 238.33 327.36 548.05 903.87 1892 1.00
## y_rep[517] 179.64 1.73 73.75 99.19 165.61 280.70 1821 1.00
## y_rep[518] 424.46 3.90 173.84 233.90 390.38 656.80 1982 1.00
## y_rep[519] 185.73 1.86 81.69 102.62 167.82 292.75 1932 1.00
## y_rep[520] 419.15 3.80 176.22 229.46 384.51 648.67 2150 1.00
## y_rep[521] 188.20 1.91 81.43 103.65 171.54 291.85 1822 1.00
## y_rep[522] 598.30 5.69 247.66 326.01 552.16 931.65 1897 1.00
## y_rep[523] 277.96 2.69 113.05 156.28 254.93 423.04 1761 1.00
## y_rep[524] 644.27 6.20 270.84 348.45 597.33 1000.34 1909 1.00
## y_rep[525] 465.42 4.27 198.00 260.56 427.43 706.47 2147 1.00
## y_rep[526] 593.60 5.66 248.72 323.54 546.40 918.45 1933 1.00
## y_rep[527] 460.25 4.30 187.65 258.08 428.18 707.18 1907 1.00
## y_rep[528] 573.28 5.86 249.33 310.67 525.56 882.78 1811 1.00
## y_rep[529] 270.06 2.55 113.41 150.16 249.14 415.30 1973 1.00
## y_rep[530] 280.83 2.53 114.76 155.04 262.41 442.70 2055 1.00
## y_rep[531] 184.94 1.77 78.02 102.91 168.10 282.19 1939 1.00
## y_rep[532] 400.66 3.84 171.51 216.00 371.18 620.23 1996 1.00
## y_rep[533] 416.84 4.14 173.86 234.06 383.66 642.27 1764 1.00
## y_rep[534] 209.95 2.02 87.69 118.04 192.67 326.26 1887 1.00
## y_rep[535] 285.26 2.70 119.05 159.19 265.50 436.98 1938 1.00
## y_rep[536] 644.76 6.36 268.56 356.69 589.79 1015.84 1785 1.00
## y_rep[537] 181.84 1.67 74.34 102.18 168.43 276.80 1978 1.00
## y_rep[538] 311.63 3.01 129.09 173.69 288.48 476.17 1838 1.00
## y_rep[539] 188.39 1.79 78.69 103.67 173.21 292.46 1932 1.00
## y_rep[540] 464.92 4.31 195.36 257.71 425.97 717.07 2051 1.00
## y_rep[541] 403.15 3.61 165.69 225.53 372.50 620.64 2109 1.00
## y_rep[542] 564.60 5.16 238.48 309.89 520.16 868.82 2134 1.00
## y_rep[543] 427.33 4.19 182.28 233.77 393.48 662.70 1888 1.00
## y_rep[544] 279.72 2.67 116.77 155.05 256.00 433.31 1912 1.00
## y_rep[545] 185.71 1.77 77.23 100.58 172.25 282.66 1912 1.00
## y_rep[546] 181.33 1.79 78.35 97.73 167.17 286.44 1914 1.00
## y_rep[547] 283.98 2.64 119.44 155.45 263.76 437.43 2041 1.00
## y_rep[548] 418.70 3.93 173.07 230.95 390.36 636.75 1943 1.00
## y_rep[549] 267.74 2.50 109.84 148.74 247.31 412.04 1935 1.00
## y_rep[550] 602.45 5.74 258.17 325.05 559.72 928.98 2021 1.00
## y_rep[551] 586.59 5.47 245.32 327.25 535.83 898.08 2012 1.00
## y_rep[552] 185.40 1.71 75.95 103.67 172.18 286.37 1974 1.00
## y_rep[553] 663.91 6.48 282.52 360.97 614.50 1023.20 1903 1.01
## y_rep[554] 314.07 2.92 127.95 173.73 292.80 477.78 1921 1.00
## y_rep[555] 208.78 2.11 88.41 116.37 194.49 321.11 1751 1.00
## y_rep[556] 281.16 2.59 115.51 157.17 260.41 434.27 1988 1.00
## y_rep[557] 186.52 1.77 77.01 100.61 173.65 291.46 1891 1.00
## y_rep[558] 400.86 4.06 172.91 222.51 363.02 618.23 1811 1.00
## y_rep[559] 205.97 1.92 87.58 112.36 191.22 316.55 2083 1.00
## y_rep[560] 176.75 1.68 72.62 97.83 162.61 273.38 1874 1.00
## y_rep[561] 570.66 5.55 245.03 311.78 518.59 890.16 1946 1.00
## y_rep[562] 589.57 5.04 229.95 318.86 550.96 899.43 2081 1.00
## y_rep[563] 319.61 2.93 135.04 176.01 295.32 491.52 2129 1.00
## y_rep[564] 416.00 3.74 164.92 234.99 386.07 631.91 1948 1.00
## y_rep[565] 269.64 2.58 112.26 148.43 249.69 411.97 1896 1.00
## y_rep[566] 187.33 1.82 79.06 100.07 171.37 293.49 1897 1.00
## y_rep[567] 281.99 2.76 122.07 154.47 259.09 426.93 1950 1.00
## y_rep[568] 186.66 1.66 75.25 104.17 172.54 285.45 2046 1.00
## y_rep[569] 569.99 5.33 237.32 314.82 524.87 870.22 1986 1.00
## y_rep[570] 280.99 2.77 114.16 158.01 262.30 423.54 1695 1.00
## y_rep[571] 423.61 4.03 181.92 229.87 385.50 666.76 2040 1.00
## y_rep[572] 594.64 5.88 246.83 328.46 545.63 903.41 1765 1.00
## y_rep[573] 281.11 2.68 118.15 157.77 258.42 439.50 1950 1.00
## y_rep[574] 422.17 3.90 180.47 226.81 387.25 655.77 2142 1.00
## y_rep[575] 463.92 4.18 189.20 256.27 427.88 706.87 2053 1.00
## y_rep[576] 205.46 1.84 85.09 113.47 189.17 324.11 2150 1.00
## y_rep[577] 270.39 2.58 115.08 147.36 251.42 418.85 1990 1.00
## y_rep[578] 309.08 2.81 127.29 171.25 288.13 473.56 2049 1.00
## y_rep[579] 649.25 6.30 274.79 354.75 606.76 1022.17 1904 1.00
## y_rep[580] 183.10 1.78 78.43 97.41 169.16 285.78 1946 1.00
## y_rep[581] 579.90 5.79 247.08 320.37 533.51 887.80 1824 1.00
## y_rep[582] 402.50 3.76 167.24 219.59 375.15 613.71 1983 1.00
## y_rep[583] 188.96 1.86 79.96 102.85 174.41 292.70 1852 1.00
## y_rep[584] 183.80 1.69 74.46 102.67 171.60 279.20 1935 1.00
## y_rep[585] 277.28 2.50 115.08 151.03 257.55 427.20 2115 1.00
## y_rep[586] 207.66 1.92 85.77 113.31 193.84 315.29 2001 1.00
## y_rep[587] 468.71 4.24 191.03 262.04 430.10 720.96 2029 1.00
## y_rep[588] 317.61 3.41 134.19 175.88 291.01 492.79 1553 1.00
## y_rep[589] 414.97 3.80 171.54 230.25 389.86 640.56 2036 1.00
## y_rep[590] 267.25 2.51 112.93 146.76 246.29 409.35 2029 1.00
## y_rep[591] 588.86 5.82 250.11 329.12 539.81 898.28 1850 1.00
## y_rep[592] 652.57 6.29 267.11 360.39 602.49 992.51 1805 1.00
## y_rep[593] 282.13 2.83 118.70 154.05 261.77 438.62 1762 1.00
## y_rep[594] 424.27 3.95 170.73 237.01 395.49 648.89 1868 1.00
## y_rep[595] 181.08 1.77 75.45 100.98 167.17 275.71 1810 1.00
## y_rep[596] 184.74 1.75 75.31 101.78 169.57 288.93 1845 1.00
## y_rep[597] 579.56 5.23 234.80 318.27 536.66 889.88 2015 1.00
## y_rep[598] 568.02 5.89 236.76 317.02 523.91 878.74 1616 1.00
## y_rep[599] 463.51 4.21 190.53 255.63 433.12 701.38 2051 1.00
## y_rep[600] 203.24 1.89 85.34 110.72 188.93 312.48 2049 1.00
## y_rep[601] 595.44 6.07 265.49 327.68 539.39 934.41 1912 1.00
## y_rep[602] 279.79 2.70 112.89 155.84 260.39 428.54 1744 1.00
## y_rep[603] 185.63 1.63 76.09 100.58 173.12 286.96 2177 1.00
## y_rep[604] 272.78 2.63 116.82 149.36 248.68 431.10 1973 1.00
## y_rep[605] 424.85 4.06 177.80 229.64 393.33 654.03 1922 1.00
## y_rep[606] 655.14 6.38 287.43 362.64 598.31 1016.81 2028 1.00
## y_rep[607] 177.10 1.79 74.06 99.76 162.61 278.10 1720 1.00
## y_rep[608] 398.30 3.88 165.82 222.08 369.06 607.32 1822 1.00
## y_rep[609] 184.61 1.80 77.94 103.08 170.79 280.65 1874 1.00
## y_rep[610] 304.26 2.96 125.27 171.78 282.10 468.69 1797 1.00
## y_rep[611] 676.23 6.14 276.47 377.75 623.03 1041.94 2029 1.00
## y_rep[612] 273.26 2.57 112.28 151.36 255.90 417.53 1906 1.00
## y_rep[613] 206.13 2.02 83.04 116.56 191.76 318.35 1694 1.01
## y_rep[614] 577.56 5.86 242.84 325.43 530.55 884.05 1719 1.00
## y_rep[615] 424.02 4.11 181.49 231.62 390.62 652.14 1949 1.00
## y_rep[616] 421.15 4.05 176.45 232.20 389.45 651.03 1894 1.00
## y_rep[617] 283.85 2.64 119.29 154.05 262.91 441.46 2046 1.00
## y_rep[618] 316.23 2.88 134.42 176.63 287.03 488.86 2173 1.00
## y_rep[619] 598.22 5.72 257.23 327.82 549.50 931.13 2025 1.00
## y_rep[620] 181.70 1.72 74.03 102.73 166.44 280.65 1855 1.00
## y_rep[621] 186.68 1.77 79.98 102.66 171.88 295.34 2037 1.00
## y_rep[622] 469.55 4.85 203.09 251.69 429.99 716.27 1751 1.00
## y_rep[623] 187.21 1.87 76.61 101.36 174.06 292.04 1686 1.00
## y_rep[624] 315.15 3.00 134.40 173.57 287.26 490.70 2008 1.00
## y_rep[625] 646.02 5.81 263.64 361.86 600.39 979.36 2062 1.00
## y_rep[626] 192.09 1.80 80.69 107.19 175.35 301.66 2007 1.00
## y_rep[627] 280.64 2.67 118.27 153.73 260.66 442.53 1968 1.00
## y_rep[628] 593.46 5.77 249.37 328.90 545.57 905.07 1867 1.00
## y_rep[629] 267.54 2.51 111.49 151.51 245.63 412.19 1968 1.00
## y_rep[630] 179.10 1.74 73.23 99.73 165.66 274.21 1771 1.00
## y_rep[631] 416.07 3.83 167.89 229.69 392.74 637.93 1922 1.00
## y_rep[632] 462.97 4.35 191.82 255.63 429.92 708.56 1940 1.00
## y_rep[633] 284.74 2.51 116.75 159.90 261.46 444.45 2161 1.00
## y_rep[634] 205.82 1.87 85.63 113.51 189.87 313.66 2105 1.00
## y_rep[635] 183.99 1.65 74.01 102.16 169.25 287.24 2004 1.00
## y_rep[636] 202.73 1.84 82.41 113.00 187.48 311.39 2004 1.00
## y_rep[637] 202.06 1.92 84.67 111.07 186.03 310.98 1936 1.00
## y_rep[638] 620.23 5.86 260.28 341.79 566.92 958.88 1975 1.00
## y_rep[639] 432.57 4.09 173.48 241.66 405.69 654.76 1801 1.00
## y_rep[640] 300.47 2.86 125.91 168.96 277.77 463.06 1940 1.00
## y_rep[641] 291.46 2.62 118.10 159.74 269.62 451.67 2038 1.00
## y_rep[642] 195.54 1.79 80.19 107.58 181.29 297.93 1996 1.00
## y_rep[643] 305.44 2.86 126.92 169.32 283.28 482.26 1975 1.00
## y_rep[644] 290.40 2.81 123.47 160.95 265.37 451.52 1932 1.00
## y_rep[645] 433.92 4.07 178.18 241.02 406.00 668.90 1916 1.00
## y_rep[646] 626.26 6.17 265.75 339.79 574.46 976.22 1858 1.00
## y_rep[647] 446.79 4.16 183.19 249.05 417.15 686.22 1942 1.00
## y_rep[648] 646.05 5.99 269.03 362.05 595.74 1003.52 2014 1.00
## y_rep[649] 298.55 2.67 118.60 167.30 282.45 458.76 1975 1.00
## y_rep[650] 649.67 6.24 269.21 359.79 603.32 992.21 1861 1.00
## y_rep[651] 725.53 6.89 309.11 397.06 666.68 1122.82 2013 1.00
## y_rep[652] 645.32 5.94 273.01 356.47 598.61 984.77 2114 1.00
## y_rep[653] 438.59 4.21 185.05 243.74 405.13 676.76 1933 1.00
## y_rep[654] 453.59 4.41 194.30 245.04 418.46 711.60 1944 1.00
## y_rep[655] 291.79 2.63 117.83 163.65 270.60 448.84 2005 1.00
## y_rep[656] 617.02 5.90 256.44 336.54 575.53 934.75 1892 1.00
## y_rep[657] 340.30 3.08 140.76 187.82 313.92 526.23 2088 1.00
## y_rep[658] 307.10 3.10 134.49 170.32 280.13 475.31 1887 1.00
## y_rep[659] 509.93 4.61 212.90 280.84 464.67 796.16 2133 1.00
## y_rep[660] 453.23 4.23 189.67 250.32 420.03 696.19 2011 1.00
## y_rep[661] 445.23 3.98 181.16 249.95 409.48 681.90 2076 1.00
## y_rep[662] 513.03 4.82 214.36 281.37 478.36 779.63 1975 1.00
## y_rep[663] 340.52 3.49 144.23 188.51 313.98 529.91 1704 1.00
## y_rep[664] 295.26 2.68 122.48 168.02 270.91 449.12 2090 1.00
## y_rep[665] 715.23 7.10 301.67 397.42 661.88 1087.34 1807 1.00
## y_rep[666] 626.00 5.67 258.71 345.98 572.32 961.35 2078 1.00
## y_rep[667] 194.94 1.75 78.48 107.89 179.42 301.01 2014 1.00
## y_rep[668] 305.10 2.78 124.96 170.89 282.29 463.91 2027 1.00
## y_rep[669] 658.43 6.51 276.09 363.36 607.02 1022.08 1799 1.00
## y_rep[670] 339.69 3.53 143.88 186.30 315.02 518.96 1662 1.00
## y_rep[671] 197.90 1.70 79.40 111.64 181.60 300.77 2172 1.00
## y_rep[672] 505.00 4.80 209.79 282.68 462.06 777.99 1914 1.00
## y_rep[673] 224.03 2.26 94.63 123.64 205.79 349.96 1758 1.00
## y_rep[674] 721.67 7.00 303.14 404.89 667.99 1111.46 1874 1.00
## y_rep[675] 460.14 4.40 195.44 256.75 426.30 696.63 1977 1.00
## y_rep[676] 203.46 1.93 85.84 111.76 186.17 318.18 1975 1.00
## y_rep[677] 199.32 1.93 85.79 106.86 183.25 311.39 1965 1.00
## y_rep[678] 506.90 4.63 203.26 277.59 477.43 773.66 1928 1.00
## y_rep[679] 307.79 2.87 132.59 169.44 281.31 477.66 2137 1.00
## y_rep[680] 200.89 1.92 84.30 109.53 186.40 309.39 1918 1.00
## y_rep[681] 652.48 6.26 269.28 366.59 603.43 991.02 1851 1.00
## y_rep[682] 456.61 4.46 190.50 251.91 422.11 702.39 1827 1.00
## y_rep[683] 226.65 2.16 96.95 121.93 209.89 348.68 2020 1.00
## y_rep[684] 192.89 1.71 77.59 108.05 179.30 291.49 2066 1.00
## y_rep[685] 333.50 3.13 136.57 186.01 308.57 520.21 1908 1.00
## y_rep[686] 731.08 6.71 302.70 401.05 677.40 1115.32 2033 1.00
## y_rep[687] 192.25 1.77 79.24 107.14 176.89 296.06 1997 1.00
## y_rep[688] 660.07 6.33 285.83 363.55 610.06 1021.44 2042 1.00
## y_rep[689] 306.86 2.94 128.45 171.34 283.63 464.59 1912 1.00
## y_rep[690] 224.86 2.05 94.27 123.28 208.00 345.43 2122 1.00
## y_rep[691] 203.26 1.96 88.17 112.52 186.32 313.03 2016 1.00
## y_rep[692] 290.49 2.97 123.33 161.18 266.80 445.89 1729 1.00
## y_rep[693] 201.21 1.88 89.06 107.54 183.80 322.88 2241 1.00
## y_rep[694] 460.22 4.05 184.39 257.78 424.41 707.56 2075 1.00
## y_rep[695] 300.72 2.68 122.33 166.59 280.87 462.86 2076 1.00
## y_rep[696] 651.57 6.04 269.58 356.63 605.66 1003.17 1995 1.00
## y_rep[697] 194.12 1.89 81.89 103.94 178.69 299.82 1882 1.00
## y_rep[698] 225.49 2.10 95.23 127.51 206.20 339.84 2048 1.00
## y_rep[699] 292.85 2.79 120.38 162.29 270.78 451.51 1856 1.00
## y_rep[700] 459.39 4.50 199.21 249.32 420.64 722.19 1964 1.00
## y_rep[701] 309.63 2.90 126.14 169.73 289.36 472.35 1893 1.00
## y_rep[702] 206.00 1.94 87.17 113.58 188.96 318.63 2017 1.00
## y_rep[703] 197.91 1.97 86.44 107.11 183.88 298.87 1934 1.00
## y_rep[704] 337.23 2.99 142.24 188.14 307.09 523.76 2266 1.00
## y_rep[705] 341.14 3.32 146.20 186.65 314.62 532.59 1934 1.00
## y_rep[706] 303.07 2.87 126.99 167.25 282.13 467.56 1960 1.00
## y_rep[707] 196.98 1.78 81.16 110.14 182.32 299.71 2073 1.00
## y_rep[708] 192.55 1.82 79.46 108.92 176.12 299.86 1916 1.00
## y_rep[709] 200.32 1.97 87.13 110.33 183.32 311.09 1948 1.00
## y_rep[710] 642.81 6.09 263.49 357.79 596.80 980.21 1870 1.00
## y_rep[711] 446.25 4.20 185.76 248.31 408.89 688.50 1956 1.00
## y_rep[712] 720.80 6.84 315.51 395.04 668.14 1108.81 2130 1.00
## y_rep[713] 221.19 2.09 92.24 122.59 202.09 338.85 1941 1.00
## y_rep[714] 289.41 2.64 116.75 159.18 270.13 440.49 1959 1.00
## y_rep[715] 502.24 4.34 204.16 279.41 464.84 765.42 2213 1.00
## y_rep[716] 311.91 2.92 132.56 173.90 287.89 483.75 2058 1.00
## y_rep[717] 302.21 2.80 123.31 172.58 279.42 462.99 1937 1.00
## y_rep[718] 451.41 4.18 184.57 249.24 420.32 686.40 1951 1.00
## y_rep[719] 311.01 2.66 123.06 171.75 293.18 471.58 2142 1.00
## y_rep[720] 205.55 2.03 85.51 114.76 188.87 320.36 1773 1.00
## y_rep[721] 496.65 4.44 193.71 280.65 459.96 758.82 1903 1.00
## y_rep[722] 341.41 3.17 140.86 185.22 317.32 526.77 1977 1.00
## y_rep[723] 228.77 2.20 95.06 126.18 210.27 350.06 1859 1.00
## y_rep[724] 456.62 4.30 195.25 256.43 418.27 700.61 2058 1.00
## y_rep[725] 611.62 6.08 255.27 344.21 563.84 929.14 1765 1.00
## y_rep[726] 201.92 1.90 83.72 111.70 184.86 314.87 1948 1.00
## y_rep[727] 210.01 1.97 89.38 114.92 194.64 322.48 2049 1.00
## y_rep[728] 690.92 6.84 286.20 380.95 638.28 1049.09 1750 1.00
## y_rep[729] 624.20 6.85 278.50 349.16 571.67 944.99 1652 1.00
## y_rep[730] 313.44 2.79 125.55 173.85 295.98 490.60 2030 1.00
## y_rep[731] 232.72 2.04 96.48 126.90 213.62 363.96 2229 1.00
## y_rep[732] 303.73 2.76 125.75 169.09 281.26 464.78 2073 1.00
## y_rep[733] 455.05 4.17 186.66 253.75 418.99 693.99 2004 1.00
## y_rep[734] 438.76 4.17 184.33 247.22 403.26 670.33 1956 1.00
## y_rep[735] 299.69 2.95 126.45 164.46 278.48 458.71 1834 1.00
## y_rep[736] 198.95 1.81 82.34 109.76 183.40 313.27 2071 1.00
## y_rep[737] 209.56 1.99 86.97 115.81 193.26 323.27 1911 1.00
## y_rep[738] 639.01 6.55 284.86 347.82 586.49 986.65 1893 1.00
## y_rep[739] 593.58 5.48 243.75 325.53 549.34 921.99 1978 1.00
## y_rep[740] 207.45 1.91 86.71 115.30 191.44 324.43 2064 1.00
## y_rep[741] 201.50 1.87 82.82 109.04 188.91 309.67 1969 1.00
## y_rep[742] 207.54 2.10 90.64 113.08 190.65 317.19 1857 1.00
## y_rep[743] 452.19 4.58 186.22 253.34 415.69 695.97 1655 1.00
## y_rep[744] 502.86 4.66 204.31 277.99 462.97 761.93 1919 1.00
## y_rep[745] 345.38 3.11 139.31 190.53 319.10 526.23 2013 1.00
## y_rep[746] 300.27 2.82 122.39 165.69 280.70 462.23 1885 1.00
## y_rep[747] 628.10 5.96 262.95 350.97 581.74 961.11 1946 1.00
## y_rep[748] 229.26 2.22 95.85 128.49 210.81 355.18 1860 1.00
## y_rep[749] 309.61 2.79 128.78 168.38 286.50 486.47 2129 1.00
## y_rep[750] 619.82 5.65 256.89 340.48 576.76 934.86 2069 1.00
## y_rep[751] 313.44 2.77 126.16 176.06 294.27 476.93 2072 1.00
## y_rep[752] 454.56 4.28 191.34 250.02 416.01 705.38 2002 1.00
## y_rep[753] 207.86 1.91 83.65 118.15 192.92 320.27 1911 1.00
## y_rep[754] 699.93 6.70 294.15 380.56 646.78 1078.34 1927 1.00
## y_rep[755] 232.22 2.30 94.15 132.30 215.87 351.98 1675 1.00
## y_rep[756] 643.47 6.21 280.55 350.93 589.93 997.00 2040 1.00
## y_rep[757] 346.89 3.47 147.12 187.59 318.57 542.02 1802 1.00
## y_rep[758] 300.79 2.96 126.97 167.66 277.76 461.98 1845 1.00
## y_rep[759] 308.69 3.09 123.75 178.25 287.98 460.61 1601 1.00
## y_rep[760] 442.21 4.54 187.80 243.60 403.00 685.72 1710 1.00
## y_rep[761] 198.36 1.87 81.08 110.57 184.28 306.82 1876 1.00
## y_rep[762] 602.08 5.84 255.71 330.47 554.69 942.17 1919 1.00
## y_rep[763] 443.74 4.00 180.29 249.89 409.54 687.49 2032 1.00
## y_rep[764] 693.40 6.55 285.34 387.80 643.33 1061.02 1899 1.00
## y_rep[765] 616.33 5.77 255.23 348.04 567.80 948.01 1957 1.00
## y_rep[766] 309.42 2.88 125.43 174.59 285.72 477.27 1894 1.00
## y_rep[767] 490.84 4.69 206.91 271.63 449.86 750.47 1949 1.00
## y_rep[768] 206.50 1.91 82.77 115.93 190.65 314.87 1870 1.00
## y_rep[769] 204.86 2.04 85.95 115.50 186.33 314.23 1782 1.00
## y_rep[770] 451.34 4.18 183.26 250.71 419.73 691.14 1920 1.00
## y_rep[771] 231.01 2.22 99.22 125.19 211.10 360.35 1991 1.00
## y_rep[772] 458.04 4.42 193.83 250.35 427.47 701.92 1922 1.00
## y_rep[773] 205.48 2.01 85.28 116.18 189.47 315.36 1804 1.00
## y_rep[774] 304.71 2.70 123.87 169.56 281.45 467.37 2109 1.00
## y_rep[775] 635.88 6.06 268.35 343.98 585.48 981.53 1963 1.00
## y_rep[776] 456.58 4.12 184.76 259.83 420.27 695.44 2010 1.00
## y_rep[777] 350.08 3.12 142.55 193.63 325.31 546.47 2082 1.00
## y_rep[778] 202.12 1.95 84.74 111.63 186.59 313.03 1891 1.00
## y_rep[779] 207.80 1.94 86.23 115.53 191.60 319.97 1970 1.00
## y_rep[780] 292.24 2.74 121.46 164.23 268.07 448.52 1969 1.00
## y_rep[781] 625.68 5.70 255.07 345.12 581.45 946.12 2002 1.00
## y_rep[782] 312.01 3.10 134.35 172.77 283.38 482.80 1877 1.00
## y_rep[783] 204.16 1.95 86.12 113.56 187.54 317.57 1955 1.00
## y_rep[784] 232.81 2.19 98.62 129.49 214.96 356.44 2019 1.00
## y_rep[785] 307.16 2.70 127.48 170.44 283.23 464.76 2222 1.00
## y_rep[786] 209.48 1.94 87.40 115.58 194.03 321.09 2033 1.00
## y_rep[787] 613.36 5.83 248.83 343.20 568.74 938.10 1820 1.00
## y_rep[788] 687.91 6.19 286.62 384.95 634.18 1055.25 2146 1.00
## y_rep[789] 296.88 3.12 127.61 158.97 273.78 458.61 1670 1.00
## y_rep[790] 206.29 1.84 80.88 116.12 192.43 311.50 1937 1.00
## y_rep[791] 448.12 4.39 188.33 247.06 415.78 683.83 1844 1.00
## y_rep[792] 441.12 4.22 186.23 240.86 400.98 693.92 1949 1.00
## y_rep[793] 490.56 4.75 198.48 267.09 458.28 756.27 1744 1.00
## y_rep[794] 341.92 3.25 144.36 191.48 315.11 529.25 1969 1.00
## y_rep[795] 230.45 2.10 95.03 127.46 214.77 352.66 2054 1.00
## y_rep[796] 498.31 5.00 206.56 278.42 465.22 765.30 1703 1.00
## y_rep[797] 309.64 2.87 129.45 171.97 288.36 463.31 2031 1.00
## y_rep[798] 620.67 5.80 257.98 339.30 571.06 981.05 1980 1.00
## y_rep[799] 213.25 1.84 87.48 117.09 196.09 331.06 2261 1.00
## y_rep[800] 460.36 4.28 187.16 257.68 428.85 708.95 1915 1.00
## y_rep[801] 206.36 1.89 84.87 113.69 192.16 313.08 2010 1.00
## y_rep[802] 441.29 3.89 173.55 246.34 414.45 677.00 1989 1.00
## y_rep[803] 600.17 5.60 245.30 331.48 560.76 933.03 1918 1.00
## y_rep[804] 617.02 5.78 259.91 348.09 568.34 958.33 2024 1.00
## y_rep[805] 341.69 2.94 137.55 190.83 318.91 516.61 2188 1.00
## y_rep[806] 440.85 4.08 183.43 241.41 408.58 683.53 2025 1.00
## y_rep[807] 197.45 1.89 83.55 106.92 181.28 306.60 1961 1.00
## y_rep[808] 308.05 2.82 125.36 172.60 285.69 473.27 1970 1.00
## y_rep[809] 301.39 2.76 125.10 169.64 277.45 453.73 2048 1.00
## y_rep[810] 294.20 2.72 124.03 160.38 271.06 460.09 2072 1.00
## y_rep[811] 433.85 3.96 181.29 239.34 398.43 660.59 2096 1.00
## y_rep[812] 308.24 2.93 130.02 172.87 281.17 480.09 1963 1.00
## y_rep[813] 616.59 5.81 257.40 342.02 571.91 952.24 1965 1.00
## y_rep[814] 633.57 5.95 272.77 348.14 577.57 985.03 2104 1.00
## y_rep[815] 202.58 1.99 86.20 112.43 186.04 310.77 1880 1.00
## y_rep[816] 703.28 6.67 300.11 389.85 648.03 1090.23 2024 1.00
## y_rep[817] 230.13 2.15 93.83 126.30 214.03 353.00 1903 1.00
## y_rep[818] 450.21 4.22 187.91 253.58 419.81 685.31 1979 1.00
## y_rep[819] 499.18 4.81 199.96 278.20 463.92 766.24 1729 1.00
## y_rep[820] 598.60 5.75 250.82 335.69 545.79 917.91 1900 1.00
## y_rep[821] 207.24 1.97 86.35 115.85 189.92 315.28 1920 1.00
## y_rep[822] 460.29 4.59 192.67 243.41 429.56 704.60 1759 1.00
## y_rep[823] 204.79 1.93 84.09 112.92 188.21 319.56 1893 1.00
## y_rep[824] 307.86 2.82 128.38 172.86 281.58 482.26 2072 1.00
## y_rep[825] 348.67 3.24 148.57 191.42 319.34 534.22 2101 1.00
## y_rep[826] 696.09 6.63 294.03 380.63 644.49 1052.22 1967 1.00
## y_rep[827] 234.55 2.14 95.07 131.41 217.55 358.39 1970 1.00
## y_rep[828] 309.62 2.81 125.18 174.78 287.23 470.61 1985 1.00
## y_rep[829] 448.66 4.20 190.43 242.96 414.32 687.79 2056 1.00
## y_rep[830] 298.91 2.73 122.87 165.36 277.54 464.56 2032 1.00
## y_rep[831] 459.78 4.43 193.66 257.37 424.02 704.31 1908 1.00
## y_rep[832] 501.43 5.06 208.57 277.81 460.61 774.28 1697 1.00
## y_rep[833] 209.72 1.99 86.54 114.37 193.95 325.32 1883 1.00
## y_rep[834] 304.11 2.77 128.62 170.44 277.66 475.94 2158 1.00
## y_rep[835] 336.15 3.07 138.07 188.27 309.97 516.41 2019 1.00
## y_rep[836] 200.27 1.90 83.16 110.88 185.06 314.96 1911 1.00
## y_rep[837] 623.52 5.68 248.26 348.12 575.60 963.92 1913 1.00
## y_rep[838] 622.16 6.29 263.73 344.08 571.61 964.81 1761 1.00
## y_rep[839] 209.27 1.91 86.17 119.67 194.42 316.56 2033 1.00
## y_rep[840] 314.21 3.03 132.89 170.77 288.53 492.52 1929 1.00
## y_rep[841] 230.33 2.33 97.01 126.42 212.34 358.00 1734 1.00
## y_rep[842] 612.02 5.77 247.27 341.73 568.90 935.21 1834 1.00
## y_rep[843] 626.87 5.84 261.05 348.38 582.50 952.20 1997 1.00
## y_rep[844] 342.35 3.16 139.96 193.38 316.62 520.06 1967 1.00
## y_rep[845] 460.47 4.71 194.18 253.27 423.22 709.78 1698 1.00
## y_rep[846] 600.78 5.73 255.22 332.74 553.69 908.61 1984 1.00
## y_rep[847] 202.95 1.96 85.24 112.31 186.36 313.00 1887 1.00
## y_rep[848] 434.96 3.83 173.35 246.01 405.18 664.56 2050 1.00
## y_rep[849] 702.09 6.87 300.68 393.22 639.04 1103.80 1916 1.00
## y_rep[850] 313.28 2.81 128.45 172.37 290.31 475.66 2092 1.00
## y_rep[851] 451.91 4.57 197.93 244.75 412.92 710.73 1879 1.00
## y_rep[852] 207.79 2.05 87.90 115.64 190.78 318.38 1847 1.00
## y_rep[853] 502.79 4.79 209.54 279.59 462.50 777.22 1917 1.00
## y_rep[854] 299.14 2.93 125.50 161.49 278.13 458.93 1835 1.00
## y_rep[855] 207.56 1.90 86.72 116.27 191.57 317.65 2077 1.00
## y_rep[856] 300.85 2.79 125.66 163.54 279.56 460.76 2028 1.00
## y_rep[857] 208.68 1.93 86.55 117.60 192.89 316.25 2008 1.00
## y_rep[858] 505.28 4.78 208.17 275.83 473.16 774.72 1900 1.00
## y_rep[859] 440.68 4.14 189.09 245.12 402.39 685.40 2084 1.00
## y_rep[860] 207.83 1.92 86.46 116.11 191.43 317.22 2036 1.00
## y_rep[861] 201.71 1.98 85.48 111.86 185.82 314.79 1869 1.00
## y_rep[862] 343.54 3.22 140.77 194.57 316.24 528.99 1908 1.00
## y_rep[863] 596.83 5.31 241.22 330.09 558.26 912.48 2067 1.00
## y_rep[864] 688.27 6.56 290.18 378.02 634.26 1055.25 1955 1.00
## y_rep[865] 235.24 2.29 98.12 129.10 217.63 363.25 1832 1.00
## y_rep[866] 511.48 5.01 214.24 286.70 470.26 783.57 1828 1.00
## y_rep[867] 619.86 6.10 264.75 342.49 569.87 956.09 1881 1.00
## y_rep[868] 212.70 2.04 89.26 116.57 197.61 328.36 1910 1.00
## y_rep[869] 310.08 3.09 135.94 168.62 284.87 479.26 1935 1.00
## y_rep[870] 632.38 6.36 258.83 348.02 587.37 961.16 1655 1.00
## y_rep[871] 690.78 6.88 291.12 375.13 639.15 1059.15 1790 1.00
## y_rep[872] 457.55 4.39 190.98 249.60 426.54 697.82 1893 1.00
## y_rep[873] 300.76 2.86 123.23 165.22 279.10 457.68 1860 1.00
## y_rep[874] 235.85 2.40 97.79 129.00 219.74 366.47 1657 1.00
## y_rep[875] 452.12 4.26 187.44 250.07 417.39 699.41 1937 1.00
## y_rep[876] 345.05 3.13 142.13 193.37 315.98 536.55 2057 1.00
## y_rep[877] 205.60 1.97 84.79 116.65 190.59 314.35 1860 1.00
## y_rep[878] 309.36 2.81 125.85 173.32 286.74 478.79 2003 1.00
## y_rep[879] 201.22 2.22 85.12 110.25 183.34 315.29 1468 1.00
## lp__ 354.90 4.70 9.69 345.46 351.93 372.37 4 2.00
##
## Samples were drawn using NUTS(diag_e) at Mon Apr 6 17:31:05 2020.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Pull the posterior predictive draws (one column per observation) from model 3.4.
y_rep <- as.matrix(fit.mod3_4, pars = "y_rep")

# Posterior predictive check: overlay the observed height density
# against the densities implied by the first 50 posterior draws.
ppc_dens_overlay(y = data$height, y_rep[1:50, ]) +
  theme_bw() +
  theme(
    legend.text = element_text(size = 25),
    legend.title = element_text(size = 18),
    axis.text = element_text(size = 18),
    legend.position = c(0.8, 0.6)
  )
Comment: \(\alpha\) takes very different values in the McElreath and Sorensen model parameterizations.